diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..bccbeed7a5 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,5 @@ +/contracts +.env.* +/linode +/frontend +/desktop diff --git a/.evans.toml b/.evans.toml new file mode 100644 index 0000000000..e60d2b79bc --- /dev/null +++ b/.evans.toml @@ -0,0 +1,3 @@ +[default] +protoFile = ["pkg/pb/bbgo.proto"] +package = "bbgo" diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000000..97b0d46ebb --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,4 @@ +# These are supported funding model platforms + +github: c9s +custom: [https://etherscan.io/address/0x5ed8508e3d046cbe9ff44d6063e29349000c8ddd] diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 0000000000..4337241653 --- /dev/null +++ b/.github/workflows/docker.yml @@ -0,0 +1,53 @@ +name: Docker Image + +on: + workflow_dispatch: + inputs: + ref: + description: 'ref to build' + required: false + default: 'main' + push: + # Sequence of patterns matched against refs/tags + tags: + - "v*" # Push events to matching v*, i.e. v1.0, v20.15.10 + branches: + - "main" + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - name: Docker meta + id: meta + uses: docker/metadata-action@v3 + with: + # list of Docker images to use as base name for tags + images: | + yoanlin/bbgo + # generate Docker tags based on the following events/attributes + tags: | + type=schedule + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + type=sha + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + push: true + # tags: yoanlin/bbgo:latest + labels: ${{ steps.meta.outputs.labels }} + tags: ${{ steps.meta.outputs.tags }} diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml new file mode 100644 index 0000000000..333e2b285a --- /dev/null +++ b/.github/workflows/go.yml @@ -0,0 +1,90 @@ +name: Go + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + runs-on: ubuntu-latest + + strategy: + matrix: + redis-version: + - 6.2 + env: + MYSQL_DATABASE: bbgo + MYSQL_USER: "root" + MYSQL_PASSWORD: "root" # pragma: allowlist secret + + steps: + + - uses: actions/checkout@v2 + + - uses: actions/cache@v2 + with: + path: | + ~/.cache/go-build + ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + + - name: Set up MySQL + run: | + sudo /etc/init.d/mysql start + mysql -e 'CREATE DATABASE ${{ env.MYSQL_DATABASE }};' -u${{ env.MYSQL_USER }} -p${{ env.MYSQL_PASSWORD }} + + - name: Set up redis + uses: shogo82148/actions-setup-redis@v1 + with: + redis-version: ${{ matrix.redis-version }} + # auto-start: "false" + + - name: Set up Go + uses: actions/setup-go@v3 + with: + go-version: 1.18 + + - name: Install Migration Tool + run: go install github.com/c9s/rockhopper/cmd/rockhopper@v1.2.1 + + - name: Test Migration SQL Files For MySQL + run: | + rockhopper --config rockhopper_mysql.yaml up + + - name: Test Migration SQL Files For SQLite + run: | + rockhopper --config rockhopper_sqlite.yaml up + + - name: Build + run: go build -v 
./cmd/bbgo + + - name: Test + run: | + go test -race -coverprofile coverage.txt -covermode atomic ./pkg/... + sed -i -e '/_requestgen.go/d' coverage.txt + + - name: TestDnum + run: | + go test -race -coverprofile coverage_dnum.txt -covermode atomic -tags dnum ./pkg/... + sed -i -e '/_requestgen.go/d' coverage_dnum.txt + + - name: Revive Check + uses: morphy2k/revive-action@v2 + with: + reporter: github-pr-review + fail_on_error: true + + - name: Upload Coverage Report + uses: codecov/codecov-action@v3 + with: + files: ./coverage.txt,./coverage_dnum.txt + + - name: Create dotenv file + run: | + echo "DB_DRIVER=mysql" >> .env.local + echo "DB_DSN=root:root@/bbgo" >> .env.local + diff --git a/.github/workflows/golang-lint.yml b/.github/workflows/golang-lint.yml new file mode 100644 index 0000000000..420724cb71 --- /dev/null +++ b/.github/workflows/golang-lint.yml @@ -0,0 +1,22 @@ +name: golang-lint +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] +permissions: + contents: read + +jobs: + golangci: + name: lint + runs-on: ubuntu-latest + steps: + - uses: actions/setup-go@v3 + with: + go-version: 1.18 + - uses: actions/checkout@v3 + - name: golangci-lint + uses: golangci/golangci-lint-action@v3 + with: + version: v1.46.2 diff --git a/.github/workflows/node.yml b/.github/workflows/node.yml new file mode 100644 index 0000000000..d523b2a8a6 --- /dev/null +++ b/.github/workflows/node.yml @@ -0,0 +1,40 @@ +name: Node.js CI + +on: + push: + branches: [ main ] + paths: + - apps/backtest-report + - frontend + pull_request: + branches: [ main ] + paths: + - apps/backtest-report + - frontend + +jobs: + build: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [ 16.x ] + + steps: + - uses: actions/checkout@v3 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + - run: npm install -g yarn + - name: Install + run: yarn install + working-directory: "apps/backtest-report" + - name: Build + run: yarn run next build + working-directory: "apps/backtest-report" + - name: Export + run: yarn run next export + working-directory: "apps/backtest-report" + - run: yarn export + working-directory: "frontend" diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml new file mode 100644 index 0000000000..dde156d4e3 --- /dev/null +++ b/.github/workflows/python.yml @@ -0,0 +1,48 @@ +name: Python + +on: + push: + branches: [ main ] + paths: + - python + + pull_request: + branches: [ main ] + paths: + - python + +jobs: + + build: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [ 3.8 ] + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install poetry + run: pip install poetry==1.1.13 + + - name: Install package + run: | + cd python + poetry install + + - name: Test + run: | + cd python + poetry run pytest -v -s tests + + - name: Lint + run: | + cd python + poetry run flake8 . diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000000..a23ef5ade7 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,193 @@ +on: + push: + # Sequence of patterns matched against refs/tags + tags: + - "v*" # Push events to matching v*, i.e. 
v1.0, v20.15.10 + +name: Create Release + +jobs: + release: + name: Create Release + runs-on: macos-latest + # runs-on: ubuntu-latest + steps: + - name: Get the version + id: get_version + run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} + - name: Checkout code + uses: actions/checkout@v2 + - name: Install Go + uses: actions/setup-go@v2 + with: + go-version: 1.18 + - name: Install Node + uses: actions/setup-node@v2 + with: + node-version: "16" + - name: Build + run: | + CGO_ENABLED=1 go get github.com/mattn/go-sqlite3 + npm install --global yarn + make dist VERSION=${{ steps.get_version.outputs.VERSION }} + shell: bash + - name: Create Release + id: create_release + uses: actions/create-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ github.ref }} + release_name: BBGO ${{ github.ref }} + body_path: doc/release/${{ steps.get_version.outputs.VERSION }}.md + # body: | + # ## Features + # - PR - title + # ## Fixes + # - (commit hash) title + draft: false + prerelease: false + - name: Upload slim Linux amd64 + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-slim-${{ steps.get_version.outputs.VERSION }}-linux-amd64.tar.gz + asset_name: bbgo-slim-${{ steps.get_version.outputs.VERSION }}-linux-amd64.tar.gz + asset_content_type: application/octet-stream + - name: Upload slim Linux arm64 + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-slim-${{ steps.get_version.outputs.VERSION }}-linux-arm64.tar.gz + asset_name: bbgo-slim-${{ steps.get_version.outputs.VERSION }}-linux-arm64.tar.gz + asset_content_type: application/octet-stream + - name: Upload slim Darwin amd64 + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-slim-${{ steps.get_version.outputs.VERSION }}-darwin-amd64.tar.gz + asset_name: bbgo-slim-${{ steps.get_version.outputs.VERSION }}-darwin-amd64.tar.gz + asset_content_type: application/octet-stream + - name: Upload slim Darwin arm64 + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-slim-${{ steps.get_version.outputs.VERSION }}-darwin-arm64.tar.gz + asset_name: bbgo-slim-${{ steps.get_version.outputs.VERSION }}-darwin-arm64.tar.gz + asset_content_type: application/octet-stream + - name: Upload Linux amd64 + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-${{ steps.get_version.outputs.VERSION }}-linux-amd64.tar.gz + asset_name: bbgo-${{ steps.get_version.outputs.VERSION }}-linux-amd64.tar.gz + asset_content_type: application/octet-stream + - name: Upload Linux arm64 + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION 
}}/bbgo-${{ steps.get_version.outputs.VERSION }}-linux-arm64.tar.gz + asset_name: bbgo-${{ steps.get_version.outputs.VERSION }}-linux-arm64.tar.gz + asset_content_type: application/octet-stream + - name: Upload Darwin amd64 + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-${{ steps.get_version.outputs.VERSION }}-darwin-amd64.tar.gz + asset_name: bbgo-${{ steps.get_version.outputs.VERSION }}-darwin-amd64.tar.gz + asset_content_type: application/octet-stream + - name: Upload Darwin arm64 + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-${{ steps.get_version.outputs.VERSION }}-darwin-arm64.tar.gz + asset_name: bbgo-${{ steps.get_version.outputs.VERSION }}-darwin-arm64.tar.gz + asset_content_type: application/octet-stream + - name: Upload slim Linux amd64 dnum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-slim-dnum-${{ steps.get_version.outputs.VERSION }}-linux-amd64.tar.gz + asset_name: bbgo-slim-dnum-${{ steps.get_version.outputs.VERSION }}-linux-amd64.tar.gz + asset_content_type: application/octet-stream + - name: Upload slim Linux arm64 dnum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-slim-dnum-${{ steps.get_version.outputs.VERSION }}-linux-arm64.tar.gz + asset_name: bbgo-slim-dnum-${{ steps.get_version.outputs.VERSION }}-linux-arm64.tar.gz + asset_content_type: application/octet-stream + - name: Upload slim Darwin amd64 dnum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-slim-dnum-${{ steps.get_version.outputs.VERSION }}-darwin-amd64.tar.gz + asset_name: bbgo-slim-dnum-${{ steps.get_version.outputs.VERSION }}-darwin-amd64.tar.gz + asset_content_type: application/octet-stream + - name: Upload slim Darwin arm64 dnum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-slim-dnum-${{ steps.get_version.outputs.VERSION }}-darwin-arm64.tar.gz + asset_name: bbgo-slim-dnum-${{ steps.get_version.outputs.VERSION }}-darwin-arm64.tar.gz + asset_content_type: application/octet-stream + - name: Upload Linux amd64 dnum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-dnum-${{ steps.get_version.outputs.VERSION }}-linux-amd64.tar.gz + asset_name: bbgo-dnum-${{ steps.get_version.outputs.VERSION }}-linux-amd64.tar.gz + asset_content_type: application/octet-stream + - name: Upload Linux arm64 dnum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: 
+ upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-dnum-${{ steps.get_version.outputs.VERSION }}-linux-arm64.tar.gz + asset_name: bbgo-dnum-${{ steps.get_version.outputs.VERSION }}-linux-arm64.tar.gz + asset_content_type: application/octet-stream + - name: Upload Darwin amd64 dnum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-dnum-${{ steps.get_version.outputs.VERSION }}-darwin-amd64.tar.gz + asset_name: bbgo-dnum-${{ steps.get_version.outputs.VERSION }}-darwin-amd64.tar.gz + asset_content_type: application/octet-stream + - name: Upload Darwin arm64 dnum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: dist/${{ steps.get_version.outputs.VERSION }}/bbgo-dnum-${{ steps.get_version.outputs.VERSION }}-darwin-arm64.tar.gz + asset_name: bbgo-dnum-${{ steps.get_version.outputs.VERSION }}-darwin-arm64.tar.gz + asset_content_type: application/octet-stream diff --git a/.gitignore b/.gitignore index d7b142ffb2..c1732e82fe 100644 --- a/.gitignore +++ b/.gitignore @@ -18,9 +18,11 @@ # Dependency directories (remove the comment below to include it) # vendor/ /.mod +/_mod /.env.local /.env.*.local +/.env.production .DS_Store @@ -30,4 +32,20 @@ /config/bbgo.yaml -/.mod +/pkg/server/assets.go + +bbgo.sqlite3 +node_modules +output + +otp*png + +/.deploy + +testoutput + +*.swp +/pkg/backtest/assets.go + +coverage.txt +coverage_dnum.txt diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000000..aba75da15a --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,9 @@ +run: + issues-exit-code: 0 + tests: true + timeout: 5m +linters: + disable-all: true + enable: + - gofmt + - gosimple diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 0000000000..cd599ae329 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,5 @@ +default: true +extends: null +MD033: false +MD010: false +MD013: false diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..cbb0418291 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,14 @@ +--- +repos: + # Secret Detection + - repo: https://github.com/Yelp/detect-secrets + rev: v1.2.0 + hooks: + - id: detect-secrets + args: ['--exclude-secrets', '3899a918953e01bfe218116cdfeccbed579e26275c4a89abcbc70d2cb9e9bbb8'] + exclude: package.lock.json + # Markdown + - repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.31.1 + hooks: + - id: markdownlint diff --git a/.travis.yml b/.travis.yml index c36fe7efb5..0653603a73 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,9 +3,23 @@ language: go go: - 1.14 - 1.15 + services: - redis-server +- mysql + +before_install: +- mysql -e 'CREATE DATABASE bbgo;' +- mysql -e 'CREATE DATABASE bbgo_dev;' + +install: +- go get github.com/c9s/rockhopper/cmd/rockhopper + before_script: - go mod download +- make migrations + script: +- bash scripts/test-sqlite3-migrations.sh +- bash scripts/test-mysql-migrations.sh - go test -v ./pkg/...
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..b70133c9cb --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +yoanlin93@gmail.com. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. 
A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..c92962cdcb --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,80 @@ +# Contributing + +Thank you for investing your time in contributing to our project! :sparkles: + +Read our [Code of Conduct](./CODE_OF_CONDUCT.md) to keep our community approachable and respectable. + +In this guide you will get an overview of the contribution workflow, from opening an issue and creating a PR to reviewing and merging the PR. + +## Getting started + +### Issues + +#### Create a new issue + +If you spot a problem, search to see if an issue already exists. If a related issue doesn't exist, you can open a new issue using a relevant issue form. + +#### Solve an issue + +Scan through our [existing issues](https://github.com/c9s/bbgo/issues) to find one that interests you. +You can narrow down the search using `labels` as filters. As a general rule, we don’t assign issues to anyone. +If you find an issue to work on, you are welcome to open a PR with a fix. + +### Making Changes + +Install pre-commit to check your changes before you commit: + + pip install pre-commit + pre-commit install + pre-commit run markdownlint --files=README.md --verbose + pre-commit run detect-secrets --all-files --verbose + +See for more details. + +For large new features, such as integrating Binance futures contracts, please open a discussion first before you start working on it. + +For small new features, you can open a pull request directly. + +As a contributor, you have a chance to receive the BBG token through the Polygon network.
+ +Each issue has its BBG label; by completing the issue with a pull request, you can get the corresponding amount of BBG. + +## Support + +### By contributing pull requests + +Any pull request is welcome: documentation, formatting fixes, tests, and features. + +### By registering an account with a referral ID + +You may register your exchange account with my referral ID to support this project. + +- For MAX Exchange: (default commission rate to your account) +- For Binance Exchange: (5% commission back to your account) + +### By donating small amounts of crypto + +- BTC address `3J6XQJNWT56amqz9Hz2BEVQ7W4aNmb5kiU` +- USDT ERC20 address `0xeBcf7887A5b767DEb2e0C77E46A22c6Adc64E427` +- USDT POLYGON address `0xeBcf7887A5b767DEb2e0C77E46A22c6Adc64E427` + +### Buying the BBG token + +BBGO issued a token, BBG, for the ecosystem (contract +address: on Ethereum). + +If you have a feature request, you can offer your BBG to contributors. + +BBG/ETH liquidity pool on Uniswap: + +BBG/MATIC pool on QuickSwap: https://quickswap.exchange/#/swap?outputCurrency=0x3Afe98235d680e8d7A52e1458a59D60f45F935C0 + +For further requests, please contact us: + +## Community + +You can join our Telegram channels: + +- BBGO International +- BBGO Taiwan + diff --git a/Dockerfile b/Dockerfile index 859b3966c6..cd9cca4ca0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,8 @@ # First stage container -FROM golang:1.15-alpine3.12 AS builder +FROM golang:1.17.6-alpine3.15 AS builder RUN apk add --no-cache git ca-certificates gcc libc-dev pkgconfig # gcc is for github.com/mattn/go-sqlite3 -RUN go get -u github.com/c9s/goose/cmd/goose # ADD . $GOPATH/src/github.com/c9s/bbgo -# WORKDIR $GOPATH/src/github.com/c9s/bbgo WORKDIR $GOPATH/src/github.com/c9s/bbgo ARG GO_MOD_CACHE @@ -12,17 +10,18 @@ ENV WORKDIR=$GOPATH/src/github.com/c9s/bbgo ENV GOPATH_ORIG=$GOPATH ENV GOPATH=${GO_MOD_CACHE:+$WORKDIR/$GO_MOD_CACHE} ENV GOPATH=${GOPATH:-$GOPATH_ORIG} +ENV CGO_ENABLED=1 +RUN cd $WORKDIR && go get github.com/mattn/go-sqlite3 ADD . . RUN go build -o $GOPATH_ORIG/bin/bbgo ./cmd/bbgo # Second stage container -FROM alpine:3.12 +FROM alpine:3.15 # RUN apk add --no-cache ca-certificates RUN mkdir /app WORKDIR /app -COPY --from=builder /go/bin/goose /usr/local/bin COPY --from=builder /go/bin/bbgo /usr/local/bin ENTRYPOINT ["/usr/local/bin/bbgo"] diff --git a/LICENSE b/LICENSE index fa0393889d..0ad25db4bd 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,661 @@ -MIT License - -Copyright (c) 2020 Yo-An Lin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
+ GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. 
+ + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. 
+ + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. 
This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). 
+ + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". 
+ + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. 
+ + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. 
diff --git a/Makefile b/Makefile index 9c2105df43..55c800e8ba 100644 --- a/Makefile +++ b/Makefile @@ -2,41 +2,275 @@ TARGET_ARCH ?= amd64 BUILD_DIR ?= build BIN_DIR := $(BUILD_DIR)/bbgo DIST_DIR ?= dist -GIT_DESC = $$(git describe --long --tags) +GIT_DESC := $(shell git describe --tags) -all: bbgo +VERSION ?= $(shell git describe --tags) + +OSX_APP_NAME = BBGO.app +OSX_APP_DIR = build/$(OSX_APP_NAME) +OSX_APP_CONTENTS_DIR = $(OSX_APP_DIR)/Contents +OSX_APP_RESOURCES_DIR = $(OSX_APP_CONTENTS_DIR)/Resources +OSX_APP_CODESIGN_IDENTITY ?= + +# OSX_APP_GUI ?= lorca +OSX_APP_GUI ?= webview + +FRONTEND_EXPORT_DIR = frontend/out + +BACKTEST_REPORT_APP_DIR = apps/backtest-report +BACKTEST_REPORT_EXPORT_DIR = apps/backtest-report/out + +all: bbgo-linux bbgo-darwin $(BIN_DIR): mkdir -p $@ -bin-dir: $(BIN_DIR) -bbgo-linux: $(BIN_DIR) - GOOS=linux GOARCH=$(TARGET_ARCH) go build -o $(BIN_DIR)/$@ ./cmd/bbgo +# build native bbgo +bbgo: static + go build -tags web,release -o $(BIN_DIR)/bbgo ./cmd/bbgo -bbgo-darwin: - GOOS=darwin GOARCH=$(TARGET_ARCH) go build -o $(BIN_DIR)/$@ ./cmd/bbgo +# build native bbgo (slim version) +bbgo-slim: + go build -tags release -o $(BIN_DIR)/$@ ./cmd/bbgo -bbgo: - go build -o $(BIN_DIR)/$@ ./cmd/$@ +# build cross-compile linux bbgo +bbgo-linux: bbgo-linux-amd64 bbgo-linux-arm64 -clean: - rm -rf $(BUILD_DIR) $(DIST_DIR) +bbgo-linux-amd64: $(BIN_DIR) pkg/server/assets.go + GOOS=linux GOARCH=amd64 go build -tags web,release -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-linux-arm64: $(BIN_DIR) pkg/server/assets.go + GOOS=linux GOARCH=arm64 go build -tags web,release -o $(BIN_DIR)/$@ ./cmd/bbgo + +# build cross-compile linux bbgo (slim version) +bbgo-slim-linux: bbgo-slim-linux-amd64 bbgo-slim-linux-arm64 + +bbgo-slim-linux-amd64: $(BIN_DIR) + GOOS=linux GOARCH=amd64 go build -tags release -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-slim-linux-arm64: $(BIN_DIR) + GOOS=linux GOARCH=arm64 go build -tags release -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-darwin: bbgo-darwin-arm64 bbgo-darwin-amd64 + +bbgo-darwin-arm64: $(BIN_DIR) pkg/server/assets.go + GOOS=darwin GOARCH=arm64 go build -tags web,release -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-darwin-amd64: $(BIN_DIR) pkg/server/assets.go + GOOS=darwin GOARCH=amd64 go build -tags web,release -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-slim-darwin-arm64: $(BIN_DIR) + GOOS=darwin GOARCH=arm64 go build -tags release -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-slim-darwin-amd64: $(BIN_DIR) + GOOS=darwin GOARCH=amd64 go build -tags release -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-slim-darwin: bbgo-slim-darwin-amd64 bbgo-slim-darwin-arm64 + +# build native bbgo +bbgo-dnum: static + go build -tags web,release,dnum -o $(BIN_DIR)/bbgo ./cmd/bbgo + +# build native bbgo (slim version) +bbgo-slim-dnum: + go build -tags release,dnum -o $(BIN_DIR)/$@ ./cmd/bbgo + +# build cross-compile linux bbgo +bbgo-dnum-linux: bbgo-dnum-linux-amd64 bbgo-dnum-linux-arm64 + +bbgo-dnum-linux-amd64: $(BIN_DIR) pkg/server/assets.go + GOOS=linux GOARCH=amd64 go build -tags web,release,dnum -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-dnum-linux-arm64: $(BIN_DIR) pkg/server/assets.go + GOOS=linux GOARCH=arm64 go build -tags web,release,dnum -o $(BIN_DIR)/$@ ./cmd/bbgo + +# build cross-compile linux bbgo (slim version) +bbgo-slim-dnum-linux: bbgo-slim-dnum-linux-amd64 bbgo-slim-dnum-linux-arm64 + +bbgo-slim-dnum-linux-amd64: $(BIN_DIR) + GOOS=linux GOARCH=amd64 go build -tags release,dnum -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-slim-dnum-linux-arm64: $(BIN_DIR) + GOOS=linux GOARCH=arm64 go build -tags release,dnum -o 
$(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-dnum-darwin: bbgo-dnum-darwin-arm64 bbgo-dnum-darwin-amd64 + +bbgo-dnum-darwin-arm64: $(BIN_DIR) pkg/server/assets.go + GOOS=darwin GOARCH=arm64 go build -tags web,release,dnum -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-dnum-darwin-amd64: $(BIN_DIR) pkg/server/assets.go + GOOS=darwin GOARCH=amd64 go build -tags web,release,dnum -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-slim-dnum-darwin-arm64: $(BIN_DIR) + GOOS=darwin GOARCH=arm64 go build -tags release,dnum -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-slim-dnum-darwin-amd64: $(BIN_DIR) + GOOS=darwin GOARCH=amd64 go build -tags release,dnum -o $(BIN_DIR)/$@ ./cmd/bbgo + +bbgo-slim-dnum-darwin: bbgo-slim-dnum-darwin-amd64 bbgo-slim-dnum-darwin-arm64 + + + +$(OSX_APP_CONTENTS_DIR): + mkdir -p $@ + +$(OSX_APP_CONTENTS_DIR)/MacOS: $(OSX_APP_CONTENTS_DIR) + mkdir -p $@ -dist: bin-dir bbgo-linux bbgo-darwin - mkdir -p $(DIST_DIR) - tar -C $(BUILD_DIR) -cvzf $(DIST_DIR)/bbgo-$$(git describe --tags).tar.gz . +$(OSX_APP_RESOURCES_DIR): $(OSX_APP_CONTENTS_DIR) + mkdir -p $@ + +$(OSX_APP_RESOURCES_DIR)/icon.icns: $(OSX_APP_RESOURCES_DIR) + cp -v desktop/icons/icon.icns $@ + +$(OSX_APP_CONTENTS_DIR)/Info.plist: $(OSX_APP_CONTENTS_DIR) + bash desktop/build-osx-info-plist.sh > $@ + +$(OSX_APP_CONTENTS_DIR)/MacOS/bbgo-desktop: $(OSX_APP_CONTENTS_DIR)/MacOS .FORCE + go build -tags web -o $@ ./cmd/bbgo-$(OSX_APP_GUI) + +desktop-osx: $(OSX_APP_CONTENTS_DIR)/MacOS/bbgo-desktop $(OSX_APP_CONTENTS_DIR)/Info.plist $(OSX_APP_RESOURCES_DIR)/icon.icns + if [[ -n "$(OSX_APP_CODESIGN_IDENTITY)" ]] ; then codesign --deep --force --verbose --sign "$(OSX_APP_CODESIGN_IDENTITY)" $(OSX_APP_DIR) \ + && codesign --verify -vvvv $(OSX_APP_DIR) ; fi + +desktop: desktop-osx + +$(DIST_DIR)/$(VERSION): + mkdir -p $(DIST_DIR)/$(VERSION) + +$(DIST_DIR)/$(VERSION)/bbgo-slim-$(VERSION)-%.tar.gz: bbgo-slim-% $(DIST_DIR)/$(VERSION) + tar -C $(BIN_DIR) -cvzf $@ $< +ifeq ($(SIGN),1) + gpg --yes --detach-sign --armor $@ +endif + +$(DIST_DIR)/$(VERSION)/bbgo-$(VERSION)-%.tar.gz: bbgo-% $(DIST_DIR)/$(VERSION) + tar -C $(BIN_DIR) -cvzf $@ $< +ifeq ($(SIGN),1) + gpg --yes --detach-sign --armor $@ +endif + +$(DIST_DIR)/$(VERSION)/bbgo-slim-dnum-$(VERSION)-%.tar.gz: bbgo-slim-dnum-% $(DIST_DIR)/$(VERSION) + tar -C $(BIN_DIR) -cvzf $@ $< +ifeq ($(SIGN),1) + gpg --yes --detach-sign --armor $@ +endif + +$(DIST_DIR)/$(VERSION)/bbgo-dnum-$(VERSION)-%.tar.gz: bbgo-dnum-% $(DIST_DIR)/$(VERSION) + tar -C $(BIN_DIR) -cvzf $@ $< +ifeq ($(SIGN),1) + gpg --yes --detach-sign --armor $@ +endif + +dist-bbgo-linux: \ + $(DIST_DIR)/$(VERSION)/bbgo-$(VERSION)-linux-arm64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-$(VERSION)-linux-amd64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-slim-$(VERSION)-linux-arm64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-slim-$(VERSION)-linux-amd64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-dnum-$(VERSION)-linux-arm64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-dnum-$(VERSION)-linux-amd64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-slim-dnum-$(VERSION)-linux-arm64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-slim-dnum-$(VERSION)-linux-amd64.tar.gz + +dist-bbgo-darwin: \ + $(DIST_DIR)/$(VERSION)/bbgo-$(VERSION)-darwin-arm64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-$(VERSION)-darwin-amd64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-slim-$(VERSION)-darwin-arm64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-slim-$(VERSION)-darwin-amd64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-dnum-$(VERSION)-darwin-arm64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-dnum-$(VERSION)-darwin-amd64.tar.gz \ + 
$(DIST_DIR)/$(VERSION)/bbgo-slim-dnum-$(VERSION)-darwin-arm64.tar.gz \ + $(DIST_DIR)/$(VERSION)/bbgo-slim-dnum-$(VERSION)-darwin-amd64.tar.gz + +dist: static dist-bbgo-linux dist-bbgo-darwin desktop + +pkg/version/version.go: .FORCE + BUILD_FLAGS="release" bash utils/generate-version-file.sh > $@ + +pkg/version/dev.go: .FORCE + BUILD_FLAGS="!release" VERSION_SUFFIX="-dev" bash utils/generate-version-file.sh > $@ + +dev-version: pkg/version/dev.go + git add $< + git commit $< -m "update dev build version" + +cmd-doc: .FORCE + go run ./cmd/update-doc + git add -v doc/commands + git commit -m "update command doc files" doc/commands || true + +version: pkg/version/version.go pkg/version/dev.go migrations cmd-doc + git add $< $(word 2,$^) + git commit $< $(word 2,$^) -m "bump version to $(VERSION)" || true + [[ -e doc/release/$(VERSION).md ]] || (echo "file doc/release/$(VERSION).md does not exist" ; exit 1) + git add -v doc/release/$(VERSION).md && git commit doc/release/$(VERSION).md -m "add $(VERSION) release note" || true + git tag -f $(VERSION) + git push origin HEAD + git push -f origin $(VERSION) migrations: - rockhopper compile --config rockhopper.yaml --output pkg/migrations + rockhopper compile --config rockhopper_mysql.yaml --output pkg/migrations/mysql + rockhopper compile --config rockhopper_sqlite.yaml --output pkg/migrations/sqlite3 + git add -v pkg/migrations && git commit -m "compile and update migration package" pkg/migrations || true docker: - GOPATH=$(PWD)/.mod go mod download - docker build --build-arg GO_MOD_CACHE=.mod --tag yoanlin/bbgo . + GOPATH=$(PWD)/_mod go mod download + docker build --build-arg GO_MOD_CACHE=_mod --tag yoanlin/bbgo . bash -c "[[ -n $(DOCKER_TAG) ]] && docker tag yoanlin/bbgo yoanlin/bbgo:$(DOCKER_TAG)" docker-push: docker push yoanlin/bbgo bash -c "[[ -n $(DOCKER_TAG) ]] && docker push yoanlin/bbgo:$(DOCKER_TAG)" -.PHONY: dist migrations +frontend/node_modules: + cd frontend && yarn install + +frontend/out/index.html: frontend/node_modules + cd frontend && yarn export + +pkg/server/assets.go: frontend/out/index.html + go run ./utils/embed -package server -tag web -output $@ $(FRONTEND_EXPORT_DIR) + +$(BACKTEST_REPORT_APP_DIR)/node_modules: + cd $(BACKTEST_REPORT_APP_DIR) && yarn install + +$(BACKTEST_REPORT_APP_DIR)/out/index.html: .FORCE $(BACKTEST_REPORT_APP_DIR)/node_modules + cd $(BACKTEST_REPORT_APP_DIR) && yarn build && yarn export + +pkg/backtest/assets.go: $(BACKTEST_REPORT_APP_DIR)/out/index.html + go run ./utils/embed -package backtest -tag web -output $@ $(BACKTEST_REPORT_EXPORT_DIR) + +embed: pkg/server/assets.go pkg/backtest/assets.go + +static: frontend/out/index.html pkg/server/assets.go pkg/backtest/assets.go + +PROTOS := \ + $(wildcard pkg/pb/*.proto) + +GRPC_GO_DEPS := $(subst .proto,.pb.go,$(PROTOS)) + +%.pb.go: %.proto .FORCE + protoc --go-grpc_out=. --go-grpc_opt=paths=source_relative --go_out=paths=source_relative:. --proto_path=. $< + +grpc-go: $(GRPC_GO_DEPS) + +grpc: grpc-go grpc-py + +install-grpc-tools: + go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26 + go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1 + pip install grpcio-tools + +# https://github.com/protocolbuffers/protobuf/issues/1491#issuecomment-261914766 +# replace `import bbgo_pb2` by `from . 
import bbgo_pb2` to use relative import +grpc-py: + python -m grpc_tools.protoc -I$(PWD)/pkg/pb \ + --python_out=$(PWD)/python \ + --grpc_python_out=$(PWD)/python \ + $(PWD)/pkg/pb/bbgo.proto + +clean: + rm -rf $(BUILD_DIR) $(DIST_DIR) $(FRONTEND_EXPORT_DIR) $(GRPC_GO_DEPS) pkg/pb/*.pb.go + +.PHONY: bbgo bbgo-slim-darwin bbgo-slim-darwin-amd64 bbgo-slim-darwin-arm64 bbgo-darwin version dist pack migrations static embed desktop grpc grpc-go grpc-py .FORCE diff --git a/README.md b/README.md index 641f1eb6de..2e896b34f3 100644 --- a/README.md +++ b/README.md @@ -1,125 +1,455 @@ -# bbgo +# BBGO -A trading bot framework written in Go. The name bbgo comes from the BB8 bot in the Star Wars movie. aka Buy BitCoin Go! +A trading bot framework written in Go. The name bbgo comes from the BB8 bot in the Star Wars movie. ## Current Status -[![Build Status](https://travis-ci.org/c9s/bbgo.svg?branch=main)](https://travis-ci.org/c9s/bbgo) +[![Go](https://github.com/c9s/bbgo/actions/workflows/go.yml/badge.svg?branch=main)](https://github.com/c9s/bbgo/actions/workflows/go.yml) +[![GoDoc](https://godoc.org/github.com/c9s/bbgo?status.svg)](https://pkg.go.dev/github.com/c9s/bbgo) +[![Go Report Card](https://goreportcard.com/badge/github.com/c9s/bbgo)](https://goreportcard.com/report/github.com/c9s/bbgo) +[![DockerHub](https://img.shields.io/docker/pulls/yoanlin/bbgo.svg)](https://hub.docker.com/r/yoanlin/bbgo) +[![Coverage Status](http://codecov.io/github/c9s/bbgo/coverage.svg?branch=main)](http://codecov.io/github/c9s/bbgo?branch=main) +open collective badge +open collective badge + +## Community + +[![Telegram Global](https://img.shields.io/badge/telegram-global-blue.svg)](https://t.me/bbgo_intl) +[![Telegram Taiwan](https://img.shields.io/badge/telegram-tw-blue.svg)](https://t.me/bbgocrypto) +[![Twitter](https://img.shields.io/twitter/follow/bbgotrading?label=Follow&style=social)](https://twitter.com/bbgotrading) + +## What You Can Do With BBGO + +### Trading Bot Users 💁‍♀️ 💁‍♂️ + +You can use BBGO to run the built-in strategies. + +### Strategy Developers 🥷 + +You can use BBGO's trading unit and back-test unit to implement your own strategies. + +### Trading Unit Developers 🧑‍💻 + +You can use BBGO's underlying common exchange API, currently it supports 4+ major exchanges, so you don't have to repeat the implementation. ## Features -- Exchange abstraction interface -- Stream integration (user data websocket) +- Exchange abstraction interface. +- Stream integration (user data websocket, market data websocket). +- Real-time orderBook integration through websocket. +- TWAP order execution support. See [TWAP Order Execution](./doc/topics/twap.md) - PnL calculation. -- Slack notification -- KLine-based Backtest -- Built-in strategies +- Slack/Telegram notification. +- Back-testing: KLine-based back-testing engine. See [Back-testing](./doc/topics/back-testing.md) +- Built-in parameter optimization tool. +- Built-in Grid strategy and many other built-in strategies. +- Multi-exchange session support: you can connect to more than 2 exchanges with different accounts or subaccounts. 
+- Indicators with an interface similar to `pandas.Series`([series](https://github.com/c9s/bbgo/blob/main/doc/development/series.md))([usage](https://github.com/c9s/bbgo/blob/main/doc/development/indicator.md)); a short usage sketch follows the Requirements section below:
+  - [Accumulation/Distribution Indicator](./pkg/indicator/ad.go)
+  - [Arnaud Legoux Moving Average](./pkg/indicator/alma.go)
+  - [Average True Range](./pkg/indicator/atr.go)
+  - [Bollinger Bands](./pkg/indicator/boll.go)
+  - [Commodity Channel Index](./pkg/indicator/cci.go)
+  - [Cumulative Moving Average](./pkg/indicator/cma.go)
+  - [Double Exponential Moving Average](./pkg/indicator/dema.go)
+  - [Directional Movement Index](./pkg/indicator/dmi.go)
+  - [Brownian Motion's Drift Factor](./pkg/indicator/drift.go)
+  - [Ease of Movement](./pkg/indicator/emv.go)
+  - [Exponentially Weighted Moving Average](./pkg/indicator/ewma.go)
+  - [Hull Moving Average](./pkg/indicator/hull.go)
+  - [Trend Line (Tool)](./pkg/indicator/line.go)
+  - [Moving Average Convergence Divergence Indicator](./pkg/indicator/macd.go)
+  - [On-Balance Volume](./pkg/indicator/obv.go)
+  - [Pivot](./pkg/indicator/pivot.go)
+  - [Running Moving Average](./pkg/indicator/rma.go)
+  - [Relative Strength Index](./pkg/indicator/rsi.go)
+  - [Simple Moving Average](./pkg/indicator/sma.go)
+  - [Ehler's Super Smoother Filter](./pkg/indicator/ssf.go)
+  - [Stochastic Oscillator](./pkg/indicator/stoch.go)
+  - [SuperTrend](./pkg/indicator/supertrend.go)
+  - [Triple Exponential Moving Average](./pkg/indicator/tema.go)
+  - [Tillson T3 Moving Average](./pkg/indicator/till.go)
+  - [Triangular Moving Average](./pkg/indicator/tma.go)
+  - [Variable Index Dynamic Average](./pkg/indicator/vidya.go)
+  - [Volatility Indicator](./pkg/indicator/volatility.go)
+  - [Volume Weighted Average Price](./pkg/indicator/vwap.go)
+  - [Zero Lag Exponential Moving Average](./pkg/indicator/zlema.go)
+  - And more...
+- HeikinAshi OHLC / Normal OHLC (check [this config](https://github.com/c9s/bbgo/blob/main/config/skeleton.yaml#L5))
+- React-powered Web Dashboard.
+- Docker image ready.
+- Kubernetes support.
+- Helm chart ready.
+- High-precision floating point (up to 16 digits, run with `-tags dnum`).
+
+## Screenshots
+
+![bbgo dashboard](assets/screenshots/dashboard.jpeg)
+
+![bbgo backtest report](assets/screenshots/backtest-report.jpg)
## Supported Exchanges
-- MAX Exchange (located in Taiwan)
-- Binance Exchange
+- Binance Spot Exchange (and binance.us)
+- FTX Spot Exchange
+- OKEx Spot Exchange
+- Kucoin Spot Exchange
+- MAX Spot Exchange (located in Taiwan)
+
+## Documentation and General Topics
+
+- Check the [documentation index](doc/README.md)
+
+## BBGO Tokenomics
+
+To support the development of BBGO, we have created a bounty pool to support contributors by giving away $BBG tokens.
+Check the details in [$BBG Contract Page](contracts/README.md) and our [official website](https://bbgo.finance)
## Requirements
-Get your exchange API key and secret after you register the accounts:
+Get your exchange API key and secret after you register the accounts (you can choose one or more exchanges):
-- For MAX:
-- For Binance:
+- MAX:
+- Binance:
+- FTX:
+- OKEx:
+- Kucoin:
+
+This project is maintained and supported by a small team. If you would like to support this project, please register on the exchanges using the referral links provided above.
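As mentioned in the feature list above, the built-in indicators expose a `pandas.Series`-like interface. The following is only a rough usage sketch and is not code taken from this repository: the `IntervalWindow` field and the `Update`/`Last`/`Index`/`Length` accessors are assumptions based on the series and indicator documents linked in the feature list.

```go
package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	// Hypothetical sketch: construct a 3-period simple moving average.
	// Field and method names are assumptions, not verified against the current package.
	sma := &indicator.SMA{IntervalWindow: types.IntervalWindow{Interval: types.Interval1m, Window: 3}}

	// Feed a few closing prices; a real strategy would do this from kline updates.
	for _, closePrice := range []float64{100.0, 101.5, 103.0, 102.0, 104.5} {
		sma.Update(closePrice)
	}

	// Series-style reads: latest value, previous value, and number of values.
	fmt.Println("last:", sma.Last())
	fmt.Println("previous:", sma.Index(1))
	fmt.Println("length:", sma.Length())
}
```

In a real strategy you would normally update or bind such an indicator from kline callbacks inside `Run()` rather than from a hard-coded slice.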
## Installation -Setup MySQL or [run it in docker](https://hub.docker.com/_/mysql) +### Install from binary -Install the builtin commands: +The following script will help you set up a config file and a dotenv file: ```sh -go get -u github.com/c9s/bbgo/cmd/bbgo +# grid trading strategy for binance exchange +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/setup-grid.sh) binance + +# grid trading strategy for max exchange +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/setup-grid.sh) max + +# bollinger grid trading strategy for binance exchange +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/setup-bollgrid.sh) binance + +# bollinger grid trading strategy for max exchange +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/setup-bollgrid.sh) max ``` -Add your dotenv file: +If you already have configuration somewhere, a download-only script might be suitable for you: +```sh +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/download.sh) ``` -SLACK_TOKEN= -TELEGRAM_BOT_TOKEN= -TELEGRAM_BOT_AUTH_TOKEN= +Or refer to the [Release Page](https://github.com/c9s/bbgo/releases) and download manually. + +Since v2, we've added new float point implementation from dnum to support decimals with higher precision. +To download & setup, please refer to [Dnum Installation](doc/topics/dnum-binary.md) +### One-click Linode StackScript + +- BBGO Grid Trading on Binance +- BBGO USDT/TWD Grid Trading on MAX +- BBGO USDC/TWD Grid Trading on MAX +- BBGO LINK/TWD Grid Trading on MAX +- BBGO USDC/USDT Grid Trading on MAX +- BBGO Standard Grid Trading on MAX + +### Build from source + +See [Build from source](./doc/build-from-source.md) + +## Configuration + +Add your dotenv file: + +```sh +# for Binance Exchange, if you have one BINANCE_API_KEY= BINANCE_API_SECRET= +# if you want to use binance.us, change this to 1 +BINANCE_US=0 + +# for MAX exchange, if you have one MAX_API_KEY= MAX_API_SECRET= -MYSQL_URL=root@tcp(127.0.0.1:3306)/bbgo?parseTime=true +# for FTX exchange, if you have one +FTX_API_KEY= +FTX_API_SECRET= +# specify it if credentials are for subaccount +FTX_SUBACCOUNT= + +# for OKEx exchange, if you have one +OKEX_API_KEY= +OKEX_API_SECRET= +OKEX_API_PASSPHRASE + +# for kucoin exchange, if you have one +KUCOIN_API_KEY= +KUCOIN_API_SECRET= +KUCOIN_API_PASSPHRASE= +KUCOIN_API_KEY_VERSION=2 ``` -Make sure you have [dotenv](https://github.com/bkeepers/dotenv) +Prepare your dotenv file `.env.local` and BBGO yaml config file `bbgo.yaml`. 
+
+To check the available environment variables, please see [Environment Variables](./doc/configuration/envvars.md)
-To sync remote exchange klines data for backtesting:
+The minimal bbgo.yaml could be generated by:
```sh
-dotenv -f .env.local -- bbgo backtest --exchange binance --config config/grid.yaml -v --sync --sync-only --sync-from 2020-01-01
+curl -o bbgo.yaml https://raw.githubusercontent.com/c9s/bbgo/main/config/minimal.yaml
```
-To run backtest:
+To run strategy:
+
+```sh
+bbgo run
+```
+
+To start bbgo with the frontend dashboard:
```sh
-dotenv -f .env.local -- bbgo backtest --exchange binance --config config/bollgrid.yaml --base-asset-baseline
+bbgo run --enable-webserver
```
+If you want to switch to another dotenv file, you can add an `--dotenv` option or a `--config` option:
+
+```sh
+bbgo sync --dotenv .env.dev --config config/grid.yaml --session binance
+```
To query transfer history:
```sh
-dotenv -f .env.local -- bbgo transfer-history --exchange max --asset USDT --since "2019-01-01"
+bbgo transfer-history --session max --asset USDT --since "2019-01-01"
```
+
-To run strategy:
+## Advanced Configuration
+
+### Testnet (Paper Trading)
+
+Currently, only the Binance testnet is supported.
+To run bbgo on the testnet, apply for new API keys from [Binance Test Network](https://testnet.binance.vision), and set the following environment variables before you start bbgo:
+
+```bash
+export PAPER_TRADE=1
+export DISABLE_MARKET_CACHE=1 # the symbols supported on the testnet are far fewer than on the mainnet
+```
+
+### Notification
+
+- [Setting up Telegram notification](./doc/configuration/telegram.md)
+- [Setting up Slack notification](./doc/configuration/slack.md)
+
+### Synchronizing Trading Data
+
+By default, BBGO does not sync your trading data from the exchange sessions, so it's hard to calculate your profit and
+loss correctly.
+
+By synchronizing trades and orders to the local database, you gain benefits such as PnL calculation, back-testing,
+and asset calculation.
+
+You can use only one database driver, either MySQL or SQLite, to store your trading data.
+
+**Notice**: SQLite is not fully supported; we recommend using MySQL instead of SQLite.
+
+#### Configure MySQL Database
+
+To use a MySQL database for data syncing, you first need to install a MySQL server:
+
+```sh
+# For Ubuntu Linux
+sudo apt-get install -y mysql-server
+
+# For newer Ubuntu Linux
+sudo apt install -y mysql-server
+```
+
+Or [run it in docker](https://hub.docker.com/_/mysql)
+
+Create your MySQL database:
```sh
-dotenv -f .env.local -- bbgo run --config config/buyandhold.yaml
+mysql -uroot -e "CREATE DATABASE bbgo CHARSET utf8"
+```
+
+Then put these environment variables in your `.env.local` file:
+
+```sh
+DB_DRIVER=mysql
+DB_DSN="user:password@tcp(127.0.0.1:3306)/bbgo"
+```
+
+#### Configure Sqlite3 Database
+
+To use SQLite3 instead of MySQL, simply put these environment variables in your `.env.local` file:
+
+```sh
+DB_DRIVER=sqlite3
+DB_DSN=bbgo.sqlite3
+```
+
+## Synchronizing your own trading data
+
+Once you have your database configured, you can sync your own trading data from the exchange.
+
+See [Configure Sync For Private Trading Data](./doc/configuration/sync.md)
+
+## Using Redis to keep persistence between BBGO sessions
+
+To use Redis, you first need to install a Redis server:
+
+```sh
+# For Ubuntu/Debian Linux
+sudo apt-get install -y redis
+
+# For newer Ubuntu/Debian Linux
+sudo apt install -y redis
+```
+
+Add the following settings to your `bbgo.yaml`:
+
+```yaml
+persistence:
+  redis:
+    host: 127.0.0.1  # The IP address or the hostname of your Redis server; use 127.0.0.1 if it runs on the same host as BBGO
+    port: 6379  # Port of the Redis server, default 6379
+    db: 0  # DB number to use. You can set to another DB to avoid conflict if other applications are using Redis too.
```
## Built-in Strategies
Check out the strategy directory [strategy](pkg/strategy) for all built-in strategies:
-- `pricealert` strategy demonstrates how to use the notification system [pricealert](pkg/strategy/pricealert)
-- `xpuremaker` strategy demonstrates how to maintain the orderbook and submit maker orders [xpuremaker](pkg/strategy/xpuremaker)
-- `buyandhold` strategy demonstrates how to subscribe kline events and submit market order [buyandhold](pkg/strategy/buyandhold)
-- `grid` strategy implements a basic grid strategy with the built-in bollinger indicator [grid](pkg/strategy/grid)
+- `pricealert` strategy demonstrates how to use the notification system [pricealert](pkg/strategy/pricealert). See
+  [document](./doc/strategy/pricealert.md).
+- `xpuremaker` strategy demonstrates how to maintain the orderbook and submit maker
+  orders [xpuremaker](pkg/strategy/xpuremaker)
+- `buyandhold` strategy demonstrates how to subscribe to kline events and submit a market
+  order [buyandhold](pkg/strategy/pricedrop)
+- `bollgrid` strategy implements a basic grid strategy with the built-in bollinger
+  indicator [bollgrid](pkg/strategy/bollgrid)
+- `grid` strategy implements the fixed price band grid strategy [grid](pkg/strategy/grid). See
+  [document](./doc/strategy/grid.md).
+- `supertrend` strategy uses the Supertrend indicator as the trend signal, and the DEMA indicator as a noise filter [supertrend](pkg/strategy/supertrend). See
+  [document](./doc/strategy/supertrend.md).
+- `support` strategy uses K-lines with high volume as support [support](pkg/strategy/support). See
+  [document](./doc/strategy/support.md).
- `flashcrash` strategy implements a strategy that catches the flashcrash [flashcrash](pkg/strategy/flashcrash)
-To run these built-in strategies, just
-modify the config file to make the configuration suitable for you, for example if you want to run
+To run these built-in strategies, just modify the config file to make the configuration suitable for you, for example, if
+you want to run
`buyandhold` strategy:
```sh
vim config/buyandhold.yaml
# run bbgo with the config
-dotenv -f .env.local -- bbgo run --config config/buyandhold.yaml
+bbgo run --config config/buyandhold.yaml
```
-## Write your own strategy
+## Back-testing
-Create your go package, and initialize the repository with `go mod` and add bbgo as a dependency:
+See [Back-testing](./doc/topics/back-testing.md)
+
+## Adding New Built-in Strategy
+
+Fork and clone this repository, create a directory under `pkg/strategy/newstrategy`, and write your strategy
+at `pkg/strategy/newstrategy/strategy.go`.
+
+Define a strategy struct:
+
+```go
+package newstrategy
+
+import (
+    "github.com/c9s/bbgo/pkg/fixedpoint"
+)
+type Strategy struct {
+    Symbol string           `json:"symbol"`
+    Param1 int              `json:"param1"`
+    Param2 int              `json:"param2"`
+    Param3 fixedpoint.Value `json:"param3"`
+}
```
+
+Register your strategy:
+
+```go
+package newstrategy
+
+const ID = "newstrategy"
+
+const stateKey = "state-v1"
+
+var log = logrus.WithField("strategy", ID)
+
+func init() {
+    bbgo.RegisterStrategy(ID, &Strategy{})
+}
+```
+
+Implement the strategy methods:
+
+```go
+package newstrategy
+
+func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) {
+    session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "2m"})
+}
+
+func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error {
+    // ....
+    return nil
+}
+```
+
+Edit `pkg/cmd/builtin.go`, and import the package, like this:
+
+```go
+package cmd
+
+// import built-in strategies
+import (
+    _ "github.com/c9s/bbgo/pkg/strategy/bollgrid"
+    _ "github.com/c9s/bbgo/pkg/strategy/buyandhold"
+    _ "github.com/c9s/bbgo/pkg/strategy/flashcrash"
+    _ "github.com/c9s/bbgo/pkg/strategy/grid"
+    _ "github.com/c9s/bbgo/pkg/strategy/pricealert"
+    _ "github.com/c9s/bbgo/pkg/strategy/support"
+    _ "github.com/c9s/bbgo/pkg/strategy/swing"
+    _ "github.com/c9s/bbgo/pkg/strategy/trailingstop"
+    _ "github.com/c9s/bbgo/pkg/strategy/xmaker"
+    _ "github.com/c9s/bbgo/pkg/strategy/xpuremaker"
+)
+```
+
+## Write your own private strategy
+
+Create your Go package, initialize the repository with `go mod`, and add bbgo as a dependency:
+
+```sh
go mod init
go get github.com/c9s/bbgo@main
```
Write your own strategy in the strategy file:
-```
+```sh
vim strategy.go
```
@@ -127,7 +457,7 @@ You can grab the skeleton strategy from
+-
+-
+-
+
+## Command Usages
+
+### Submitting Orders to a specific exchange session
+
+```shell
+bbgo submit-order --session=okex --symbol=OKBUSDT --side=buy --price=10.0 --quantity=1
+```
+
+### Listing Open Orders of a specific exchange session
+
+```sh
+bbgo list-orders open --session=okex --symbol=OKBUSDT
+bbgo list-orders open --session=ftx --symbol=FTTUSDT
+bbgo list-orders open --session=max --symbol=MAXUSDT
+bbgo list-orders open --session=binance --symbol=BNBUSDT
+```
+
+### Canceling an open order
+
+```shell
+# both order id and symbol are required for okex
+bbgo cancel-order --session=okex --order-id=318223238325248000 --symbol=OKBUSDT
+
+# for max, you can just give your order id
+bbgo cancel-order --session=max --order-id=1234566
+```
+
+### Debugging user data stream
+
+```shell
+bbgo userdatastream --session okex
+bbgo userdatastream --session max
+bbgo userdatastream --session binance
+```
+
## Dynamic Injection
In order to minimize the strategy code, bbgo supports dynamic dependency injection.
-Before executing your strategy, bbgo injects the components into your strategy object if
-it found the embedded field that is using bbgo component. for example:
+Before executing your strategy, bbgo injects the components into your strategy object if it finds an embedded field
+that is using a bbgo component.
for example: ```go type Strategy struct { - *bbgo.Notifiability +*bbgo.Notifiability } ``` @@ -181,13 +553,21 @@ Supported components (single exchange strategy only for now): - `*bbgo.Notifiability` - `bbgo.OrderExecutor` - -If you have `Symbol string` field in your strategy, your strategy will be detected as a symbol-based strategy, -then the following types could be injected automatically: +If you have `Symbol string` field in your strategy, your strategy will be detected as a symbol-based strategy, then the +following types could be injected automatically: - `*bbgo.ExchangeSession` - `types.Market` +## Strategy Execution Phases + +1. Load config from the config file. +2. Allocate and initialize exchange sessions. +3. Add exchange sessions to the environment (the data layer). +4. Use the given environment to initialize the trader object (the logic layer). +5. The trader initializes the environment and start the exchange connections. +6. Call strategy.Run() method sequentially. + ## Exchange API Examples Please check out the example directory: [examples](examples) @@ -212,76 +592,64 @@ streambook := types.NewStreamBook(symbol) streambook.BindStream(stream) ``` -## Telegram Integration +## Deployment -- In telegram: @botFather -- /newbot -- input bot display name. ex. `bbgo_bot` -- input bot username. This should be global unique. ex. `PeqFqJxP_bbgo_bot` -- Botfather return bot token. Keep bot token safe -- Set `TELEGRAM_BOT_TOKEN` in `.env.local` -- Set `TELEGRAM_AUTH_TOKEN` in `.env.local`. Generate your own auth token. ex. 92463901, or kx2UX@eM -- Run bbgo -- In telegram: search your bot `PeqFqJxP_bbgo_bot` -- /start -- /auth 92463901 -- done! your session will route to telegram +- [Helm Chart](./doc/deployment/helm-chart.md) +- Baremetal machine or a VPS -## Helm Chart +## Development -Prepare your docker image locally (you can also use the docker image from docker hub): +- [Adding New Exchange](./doc/development/adding-new-exchange.md) +- [Migration](./doc/development/migration.md) -``` -make docker DOCKER_TAG=1.16.0 -``` +### Setting up your local repository -The docker tag version number is from the file [Chart.yaml](charts/bbgo/Chart.yaml) +1. Click the "Fork" button from the GitHub repository. +2. Clone your forked repository into `$GOPATH/github.com/c9s/bbgo`. +3. Change directory into `$GOPATH/github.com/c9s/bbgo`. +4. Create a branch and start your development. +5. Test your changes. +6. Push your changes to your fork. +7. Send a pull request. -Prepare your secret: +### Testing Desktop App -``` -kubectl create secret generic bbgo-grid --from-env-file .env.local -``` - -Configure your config file, the chart defaults to read config/bbgo.yaml to -create a configmap: +for webview -``` -cp config/grid.yaml config/bbgo.yaml -vim config/bbgo.yaml +```sh +make embed && go run -tags web ./cmd/bbgo-webview ``` -Install chart with the preferred release name, the release name maps to the -previous secret we just created, that is, `bbgo-grid`: +for lorca -``` -helm install bbgo-grid ./charts/bbgo +```sh +make embed && go run -tags web ./cmd/bbgo-lorca ``` -Delete chart: +## FAQ -``` -helm delete bbgo -``` +What's Position? + +- Base Currency & Quote Currency +- How to calculate average cost? -## Support +## Looking For A New Strategy? -### By contributing pull requests +You can write an article about BBGO in any topic, in 750-1500 words for exchange, and I can implement the strategy for you (depends on the complexity and efforts). 
+If you're interested in, DM me in telegram or twitter , we can discuss. -Any pull request is welcome, documentation, format fixing, testing, features. +## Contributing -### By registering account with referral ID +See [Contributing](./CONTRIBUTING.md) -You may register your exchange account with my referral ID to support this project. +### Financial Contributors -- For MAX Exchange: (default commission rate to your account) -- For Binance Exchange: (5% commission back to your account) + -### By small amount cryptos +## Supporter -- BTC address `3J6XQJNWT56amqz9Hz2BEVQ7W4aNmb5kiU` -- USDT ERC20 address `0x63E5805e027548A384c57E20141f6778591Bac6F` +- GitBook ## License -MIT License +AGPL License diff --git a/apps/backtest-report/.eslintrc.json b/apps/backtest-report/.eslintrc.json new file mode 100644 index 0000000000..bffb357a71 --- /dev/null +++ b/apps/backtest-report/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "next/core-web-vitals" +} diff --git a/apps/backtest-report/.gitignore b/apps/backtest-report/.gitignore new file mode 100644 index 0000000000..737d872109 --- /dev/null +++ b/apps/backtest-report/.gitignore @@ -0,0 +1,35 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo diff --git a/apps/backtest-report/README.md b/apps/backtest-report/README.md new file mode 100644 index 0000000000..332787c08d --- /dev/null +++ b/apps/backtest-report/README.md @@ -0,0 +1,54 @@ +This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). + +## Getting Started + +Install the dependencies: + +``` +yarn install +``` + + +Create a symlink to your back-test report output directory: + +``` +(cd public && ln -s ../../../output output) +``` + + +Generate some back-test reports: + +``` +(cd ../.. && go run ./cmd/bbgo backtest --config bollmaker_ethusdt.yaml --debug --session binance --output output --subdir) +``` + +Start the development server: + +```bash +npm run dev +# or +yarn dev +``` + +Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. + +You can start editing the page by modifying `pages/index.tsx`. The page auto-updates as you edit the file. + +[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.ts`. + +The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages. + +## Learn More + +To learn more about Next.js, take a look at the following resources: + +- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. +- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. + +You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! 
+ +## Deploy on Vercel + +The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. + +Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. diff --git a/apps/backtest-report/components/ReportDetails.tsx b/apps/backtest-report/components/ReportDetails.tsx new file mode 100644 index 0000000000..5e7ef122e3 --- /dev/null +++ b/apps/backtest-report/components/ReportDetails.tsx @@ -0,0 +1,246 @@ +import React, {useEffect, useState} from 'react'; + +import moment from 'moment'; + +import TradingViewChart from './TradingViewChart'; + +import {BalanceMap, ReportSummary} from "../types"; + +import { + Badge, + Container, + createStyles, + Grid, + Group, + Paper, + SimpleGrid, + Skeleton, + Table, + Text, + ThemeIcon, + Title +} from '@mantine/core'; + +import {ArrowDownRight, ArrowUpRight,} from 'tabler-icons-react'; + +const useStyles = createStyles((theme) => ({ + root: { + paddingTop: theme.spacing.xl * 1.5, + paddingBottom: theme.spacing.xl * 1.5, + }, + + label: { + fontFamily: `Greycliff CF, ${theme.fontFamily}`, + }, +})); + +interface StatsGridIconsProps { + data: { + title: string; + value: string; + diff?: number + dir?: string; + desc?: string; + }[]; +} + +function StatsGridIcons({data}: StatsGridIconsProps) { + const {classes} = useStyles(); + const stats = data.map((stat) => { + const DiffIcon = stat.diff && stat.diff > 0 ? ArrowUpRight : ArrowDownRight; + const DirIcon = stat.dir && stat.dir == "up" ? ArrowUpRight : ArrowDownRight; + + return ( + + +
+ + {stat.title} + {stat.dir ? + ({color: stat.dir == "up" ? theme.colors.teal[6] : theme.colors.red[6]})} + size={16} + radius="xs" + > + + + : null} + + + {stat.value} + +
+ + + {stat.diff ? + ({color: stat.diff && stat.diff > 0 ? theme.colors.teal[6] : theme.colors.red[6]})} + size={38} + radius="md" + > + + + : null} +
+ + {stat.diff ? + + 0 ? 'teal' : 'red'} weight={700}> + {stat.diff}% + {' '} + {stat.diff && stat.diff > 0 ? 'increase' : 'decrease'} compared to last month + : null} + + {stat.desc ? ( + + {stat.desc} + + ) : null} + +
+ ); + }); + + return ( + + {stats} + + ); +} + + +interface ReportDetailsProps { + basePath: string; + runID: string; +} + +const fetchReportSummary = (basePath: string, runID: string) => { + return fetch( + `${basePath}/${runID}/summary.json`, + ) + .then((res) => res.json()) + .catch((e) => { + console.error("failed to fetch index", e) + }); +} + +const skeleton = ; + + +interface BalanceDetailsProps { + balances: BalanceMap; +} + +const BalanceDetails = (props: BalanceDetailsProps) => { + const rows = Object.entries(props.balances).map(([k, v]) => { + return + {k} + {v.available} + ; + }); + + return + + + + + + + {rows} +
CurrencyBalance
; +}; + +const ReportDetails = (props: ReportDetailsProps) => { + const [reportSummary, setReportSummary] = useState() + useEffect(() => { + fetchReportSummary(props.basePath, props.runID).then((summary: ReportSummary) => { + console.log("summary", props.runID, summary); + setReportSummary(summary) + }) + }, [props.runID]) + + if (!reportSummary) { + return
+

Loading {props.runID}

+
; + } + + const strategyName = props.runID.split("_")[1] + const runID = props.runID.split("_").pop() + const totalProfit = Math.round(reportSummary.symbolReports.map((report) => report.pnl.profit).reduce((prev, cur) => prev + cur) * 100) / 100 + const totalUnrealizedProfit = Math.round(reportSummary.symbolReports.map((report) => report.pnl.unrealizedProfit).reduce((prev, cur) => prev + cur) * 100) / 100 + const totalTrades = reportSummary.symbolReports.map((report) => report.pnl.numTrades).reduce((prev, cur) => prev + cur) || 0 + + const totalBuyVolume = reportSummary.symbolReports.map((report) => report.pnl.buyVolume).reduce((prev, cur) => prev + cur) || 0 + const totalSellVolume = reportSummary.symbolReports.map((report) => report.pnl.sellVolume).reduce((prev, cur) => prev + cur) || 0 + + const volumeUnit = reportSummary.symbolReports.length == 1 ? reportSummary.symbolReports[0].market.baseCurrency : ''; + + return
+ +
+ Strategy: {strategyName} + {reportSummary.sessions.map((session) => Exchange: {session})} + {reportSummary.symbols.map((symbol) => Symbol: {symbol})} + + {reportSummary.startTime.toString()} — {reportSummary.endTime.toString()} ~ { + moment.duration((new Date(reportSummary.endTime)).getTime() - (new Date(reportSummary.startTime)).getTime()).humanize() + } + Run ID: {runID} +
+ = 0 ? "up" : "down"}, + { + title: "Unr. Profit", + value: totalUnrealizedProfit.toString() + "$", + dir: totalUnrealizedProfit > 0 ? "up" : "down" + }, + {title: "Trades", value: totalTrades.toString()}, + {title: "Buy Vol", value: totalBuyVolume.toString() + ` ${volumeUnit}`}, + {title: "Sell Vol", value: totalSellVolume.toString() + ` ${volumeUnit}`}, + ]}/> + + + + Initial Total Balances + + + + Final Total Balances + + + + + { + /* + + + + + {skeleton} + + */ + } +
+ { + reportSummary.symbols.map((symbol: string, i: number) => { + return + }) + } +
+ +
+
; +}; + +export default ReportDetails; diff --git a/apps/backtest-report/components/ReportNavigator.tsx b/apps/backtest-report/components/ReportNavigator.tsx new file mode 100644 index 0000000000..55e16d3bcd --- /dev/null +++ b/apps/backtest-report/components/ReportNavigator.tsx @@ -0,0 +1,79 @@ +import React, {useEffect, useState} from 'react'; +import {List, ThemeIcon} from '@mantine/core'; +import {CircleCheck} from 'tabler-icons-react'; + +import {ReportEntry, ReportIndex} from '../types'; + +function fetchIndex(basePath: string, setter: (data: any) => void) { + return fetch( + `${basePath}/index.json`, + ) + .then((res) => res.json()) + .then((data) => { + console.log("reportIndex", data); + data.runs.reverse() // last reports render first + setter(data); + }) + .catch((e) => { + console.error("failed to fetch index", e) + }); +} + +interface ReportNavigatorProps { + onSelect: (reportEntry: ReportEntry) => void; +} + +const ReportNavigator = (props: ReportNavigatorProps) => { + const [isLoading, setLoading] = useState(false) + const [reportIndex, setReportIndex] = useState({runs: []}); + + useEffect(() => { + setLoading(true) + fetchIndex('/output', setReportIndex).then(() => { + setLoading(false); + }) + }, []); + + if (isLoading) { + return
Loading...
; + } + + if (reportIndex.runs.length == 0) { + return
No back-test report data
+ } + + return
+ + + + } + > + { + reportIndex.runs.map((entry) => { + return { + if (props.onSelect) { + props.onSelect(entry); + } + }}> +
+ {entry.id} +
+
+ }) + } +
+
; + + +}; + +export default ReportNavigator; diff --git a/apps/backtest-report/components/TradingViewChart.tsx b/apps/backtest-report/components/TradingViewChart.tsx new file mode 100644 index 0000000000..f6143230f0 --- /dev/null +++ b/apps/backtest-report/components/TradingViewChart.tsx @@ -0,0 +1,503 @@ +import React, {useEffect, useRef, useState} from 'react'; +import {tsvParse} from "d3-dsv"; +import {Button} from '@mantine/core'; + +// https://github.com/tradingview/lightweight-charts/issues/543 +// const createChart = dynamic(() => import('lightweight-charts')); +import {createChart, CrosshairMode} from 'lightweight-charts'; +import {ReportSummary} from "../types"; +import moment from "moment"; + +const parseKline = () => { + return (d : any) => { + d.startTime = new Date(Number(d.startTime) * 1000); + d.endTime = new Date(Number(d.endTime) * 1000); + d.time = d.startTime.getTime() / 1000; + + for (const key in d) { + // convert number fields + if (Object.prototype.hasOwnProperty.call(d, key)) { + switch (key) { + case "open": + case "high": + case "low": + case "close": + case "volume": + d[key] = +d[key]; + break + } + } + } + + return d; + }; +}; + + +const parseOrder = () => { + return (d: any) => { + for (const key in d) { + // convert number fields + if (Object.prototype.hasOwnProperty.call(d, key)) { + switch (key) { + case "order_id": + case "price": + case "quantity": + d[key] = +d[key]; + break; + case "update_time": + case "creation_time": + case "time": + d[key] = new Date(d[key]); + break; + } + } + } + return d; + }; +} + +const parsePosition = () => { + return (d: any) => { + for (const key in d) { + // convert number fields + if (Object.prototype.hasOwnProperty.call(d, key)) { + switch (key) { + case "accumulated_profit": + case "average_cost": + case "quote": + case "base": + d[key] = +d[key]; + break + case "time": + d[key] = new Date(d[key]); + break + } + } + } + return d; + }; +} + +const fetchPositionHistory = (basePath: string, runID: string, filename: string) => { + return fetch( + `${basePath}/${runID}/${filename}`, + ) + .then((response) => response.text()) + .then((data) => tsvParse(data, parsePosition()) as Array) + .catch((e) => { + console.error("failed to fetch orders", e) + }); +}; + +const fetchOrders = (basePath: string, runID: string) => { + return fetch( + `${basePath}/${runID}/orders.tsv`, + ) + .then((response) => response.text()) + .then((data: string) => tsvParse(data, parseOrder()) as Array) + .catch((e) => { + console.error("failed to fetch orders", e) + }); +} + +const parseInterval = (s: string) => { + switch (s) { + case "1m": + return 60; + case "5m": + return 60 * 5; + case "15m": + return 60 * 15; + case "30m": + return 60 * 30; + case "1h": + return 60 * 60; + case "4h": + return 60 * 60 * 4; + case "6h": + return 60 * 60 * 6; + case "12h": + return 60 * 60 * 12; + case "1d": + return 60 * 60 * 24; + } + + return 60; +}; + +interface Order { + order_type: string; + side: string; + price: number; + quantity: number; + executed_quantity: number; + status: string; + update_time: Date; + creation_time: Date; + time?: Date; +} + +interface Marker { + time: number; + position: string; + color: string; + shape: string; + text: string; +} + +const ordersToMarkets = (interval: string, orders: Array | void): Array => { + const markers: Array = []; + const intervalSecs = parseInterval(interval); + + if (!orders) { + return markers; + } + + // var markers = [{ time: data[data.length - 48].time, position: 'aboveBar', color: '#f68410', shape: 
'circle', text: 'D' }]; + for (let i = 0; i < orders.length; i++) { + let order = orders[i]; + let t = (order.update_time || order.time).getTime() / 1000.0; + let lastMarker = markers.length > 0 ? markers[markers.length - 1] : null; + if (lastMarker) { + let remainder = lastMarker.time % intervalSecs; + let startTime = lastMarker.time - remainder; + let endTime = (startTime + intervalSecs); + // skip the marker in the same interval of the last marker + if (t < endTime) { + // continue + } + } + + switch (order.side) { + case "BUY": + markers.push({ + time: t, + position: 'belowBar', + color: '#239D10', + shape: 'arrowUp', + text: ''+order.price.toFixed(0), + //text: 'B', + }); + break; + case "SELL": + markers.push({ + time: t, + position: 'aboveBar', + color: '#e91e63', + shape: 'arrowDown', + text: ''+order.price.toFixed(0), + //text: 'S', + }); + break; + } + } + return markers; +}; + +const removeDuplicatedKLines = (klines: Array): Array => { + const newK = []; + for (let i = 0; i < klines.length; i++) { + const k = klines[i]; + + if (i > 0 && k.time === klines[i - 1].time) { + console.warn(`duplicated kline at index ${i}`, k) + continue + } + + newK.push(k); + } + return newK; +} + +function fetchKLines(basePath: string, runID: string, symbol: string, interval: string, startTime: Date, endTime: Date) { + var duration = [moment(startTime).format('YYYYMMDD'), moment(endTime).format('YYYYMMDD')]; + return fetch( + `${basePath}/shared/klines_${duration.join('-')}/${symbol}-${interval}.tsv`, + ) + .then((response) => response.text()) + .then((data) => tsvParse(data, parseKline())) + .catch((e) => { + console.error("failed to fetch klines", e) + }); +} + +interface KLine { + time: Date; + startTime: Date; + endTime: Date; + interval: string; + open: number; + high: number; + low: number; + close: number; + volume: number; +} + +const klinesToVolumeData = (klines: Array) => { + const volumes = []; + + for (let i = 0; i < klines.length; i++) { + const kline = klines[i]; + volumes.push({ + time: (kline.startTime.getTime() / 1000), + value: kline.volume, + }) + } + + return volumes; +} + + +interface PositionHistoryEntry { + time: Date; + base: number; + quote: number; + average_cost: number; +} + +const positionBaseHistoryToLineData = (interval: string, hs: Array) => { + const bases = []; + const intervalSeconds = parseInterval(interval); + for (let i = 0; i < hs.length; i++) { + const pos = hs[i]; + if (!pos.time) { + console.warn('position history record missing time field', pos) + continue + } + + // ignore duplicated entry + if (i > 0 && hs[i].time.getTime() === hs[i - 1].time.getTime()) { + continue + } + + let t = pos.time.getTime() / 1000; + t = (t - t % intervalSeconds) + + if (i > 0 && (pos.base === hs[i - 1].base)) { + continue; + } + + bases.push({ + time: t, + value: pos.base, + }); + } + return bases; +} + + +const positionAverageCostHistoryToLineData = (interval: string, hs: Array) => { + const avgCosts = []; + const intervalSeconds = parseInterval(interval); + for (let i = 0; i < hs.length; i++) { + const pos = hs[i]; + + if (!pos.time) { + console.warn('position history record missing time field', pos) + continue + } + + // ignore duplicated entry + if (i > 0 && hs[i].time.getTime() === hs[i - 1].time.getTime()) { + continue + } + + + let t = pos.time.getTime() / 1000; + t = (t - t % intervalSeconds) + + if (i > 0 && (pos.average_cost === hs[i - 1].average_cost)) { + continue; + } + + if (pos.base === 0) { + avgCosts.push({ + time: t, + value: 0, + }); + } else { + 
avgCosts.push({ + time: t, + value: pos.average_cost, + }); + } + + + } + return avgCosts; +} + +const createBaseChart = (chartContainerRef: React.RefObject) => { + return createChart(chartContainerRef.current, { + width: chartContainerRef.current.clientWidth, + height: chartContainerRef.current.clientHeight, + timeScale: { + timeVisible: true, + borderColor: '#D1D4DC', + }, + rightPriceScale: { + borderColor: '#D1D4DC', + }, + leftPriceScale: { + visible: true, + borderColor: 'rgba(197, 203, 206, 1)', + }, + layout: { + backgroundColor: '#ffffff', + textColor: '#000', + }, + crosshair: { + mode: CrosshairMode.Normal, + }, + grid: { + horzLines: { + color: '#F0F3FA', + }, + vertLines: { + color: '#F0F3FA', + }, + }, + }); +}; + + +interface TradingViewChartProps { + basePath: string; + runID: string; + reportSummary: ReportSummary; + symbol: string; +} + +const TradingViewChart = (props: TradingViewChartProps) => { + const chartContainerRef = useRef(); + const chart = useRef(); + const resizeObserver = useRef(); + const intervals = props.reportSummary.intervals || []; + const [currentInterval, setCurrentInterval] = useState(intervals.length > 0 ? intervals[0] : '1m'); + + useEffect(() => { + if (!chartContainerRef.current || chartContainerRef.current.children.length > 0) { + return; + } + + const chartData: any = {}; + const fetchers = []; + const ordersFetcher = fetchOrders(props.basePath, props.runID).then((orders) => { + const markers = ordersToMarkets(currentInterval, orders); + chartData.orders = orders; + chartData.markers = markers; + return orders; + }); + fetchers.push(ordersFetcher); + + if (props.reportSummary && props.reportSummary.manifests && props.reportSummary.manifests.length === 1) { + const manifest = props.reportSummary?.manifests[0]; + if (manifest && manifest.type === "strategyProperty" && manifest.strategyProperty === "position") { + const positionHistoryFetcher = fetchPositionHistory(props.basePath, props.runID, manifest.filename).then((data) => { + chartData.positionHistory = data; + }); + fetchers.push(positionHistoryFetcher); + } + } + + const kLinesFetcher = fetchKLines(props.basePath, props.runID, props.symbol, currentInterval, new Date(props.reportSummary.startTime), new Date(props.reportSummary.endTime)).then((klines) => { + chartData.klines = removeDuplicatedKLines(klines as Array) + }); + fetchers.push(kLinesFetcher); + + Promise.all(fetchers).then(() => { + console.log("createChart") + + if (chart.current) { + chart.current.remove(); + } + + chart.current = createBaseChart(chartContainerRef); + + const series = chart.current.addCandlestickSeries({ + upColor: 'rgb(38,166,154)', + downColor: 'rgb(255,82,82)', + wickUpColor: 'rgb(38,166,154)', + wickDownColor: 'rgb(255,82,82)', + borderVisible: false, + }); + series.setData(chartData.klines); + series.setMarkers(chartData.markers); + + const volumeData = klinesToVolumeData(chartData.klines); + const volumeSeries = chart.current.addHistogramSeries({ + color: '#182233', + lineWidth: 2, + priceFormat: { + type: 'volume', + }, + overlay: true, + scaleMargins: { + top: 0.8, + bottom: 0, + }, + }); + volumeSeries.setData(volumeData); + + if (chartData.positionHistory) { + const lineSeries = chart.current.addLineSeries(); + const costLine = positionAverageCostHistoryToLineData(currentInterval, chartData.positionHistory); + lineSeries.setData(costLine); + + const baseLineSeries = chart.current.addLineSeries({ + priceScaleId: 'left', + color: '#98338C', + }); + const baseLine = 
positionBaseHistoryToLineData(currentInterval, chartData.positionHistory) + baseLineSeries.setData(baseLine); + } + + chart.current.timeScale().fitContent(); + }); + + return () => { + if (chart.current) { + chart.current.remove(); + } + }; + }, [props.runID, props.reportSummary, currentInterval]) + + // see: + // https://codesandbox.io/s/9inkb?file=/src/styles.css + useEffect(() => { + resizeObserver.current = new ResizeObserver(entries => { + if (!chart.current) { + return; + } + + const {width, height} = entries[0].contentRect; + chart.current.applyOptions({width, height}); + + setTimeout(() => { + chart.current.timeScale().fitContent(); + }, 0); + }); + + resizeObserver.current.observe(chartContainerRef.current); + return () => resizeObserver.current.disconnect(); + }, []); + + return ( +
+ + {intervals.map((interval) => { + return + })} + +
+
+
+ ); +}; + +export default TradingViewChart; diff --git a/apps/backtest-report/next-env.d.ts b/apps/backtest-report/next-env.d.ts new file mode 100644 index 0000000000..4f11a03dc6 --- /dev/null +++ b/apps/backtest-report/next-env.d.ts @@ -0,0 +1,5 @@ +/// +/// + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/basic-features/typescript for more information. diff --git a/apps/backtest-report/next.config.js b/apps/backtest-report/next.config.js new file mode 100644 index 0000000000..bd686c9919 --- /dev/null +++ b/apps/backtest-report/next.config.js @@ -0,0 +1,33 @@ + +// workaround for react financial charts +// https://github.com/react-financial/react-financial-charts/issues/606 + +// workaround for lightweight chart +// https://stackoverflow.com/questions/65936222/next-js-syntaxerror-unexpected-token-export +// https://stackoverflow.com/questions/66244968/cannot-use-import-statement-outside-a-module-error-when-importing-react-hook-m +const withTM = require('next-transpile-modules')([ + 'lightweight-charts', + 'fancy-canvas', + // 'd3-array', + // 'd3-format', + // 'd3-time', + // 'd3-time-format', + // 'react-financial-charts', + // '@react-financial-charts/annotations', + // '@react-financial-charts/axes', + // '@react-financial-charts/coordinates', + // '@react-financial-charts/core', + // '@react-financial-charts/indicators', + // '@react-financial-charts/interactive', + // '@react-financial-charts/scales', + // '@react-financial-charts/series', + // '@react-financial-charts/tooltip', + // '@react-financial-charts/utils', +]); + +/** @type {import('next').NextConfig} */ +const nextConfig = { + reactStrictMode: false, +} + +module.exports = withTM(nextConfig); diff --git a/apps/backtest-report/package.json b/apps/backtest-report/package.json new file mode 100644 index 0000000000..dcc8a8d0df --- /dev/null +++ b/apps/backtest-report/package.json @@ -0,0 +1,38 @@ +{ + "name": "bbgo-backtest-report", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "yarn run next dev", + "build": "yarn run next build", + "start": "next start", + "lint": "next lint", + "export": "yarn run next build && yarn run next export" + }, + "dependencies": { + "@mantine/core": "^4.2.5", + "@mantine/hooks": "^4.2.5", + "@mantine/next": "^4.2.5", + "d3-dsv": "^3.0.1", + "d3-format": "^3.1.0", + "d3-time-format": "^4.1.0", + "lightweight-charts": "^3.8.0", + "moment": "^2.29.3", + "next": "12.1.6", + "react": "18.1.0", + "react-dom": "18.1.0", + "tabler-icons-react": "^1.48.0" + }, + "devDependencies": { + "@types/d3-dsv": "^3.0.0", + "@types/d3-format": "^3.0.1", + "@types/d3-time-format": "^4.0.0", + "@types/node": "17.0.31", + "@types/react": "18.0.8", + "@types/react-dom": "18.0.3", + "eslint": "8.14.0", + "eslint-config-next": "12.1.6", + "next-transpile-modules": "^9.0.0", + "typescript": "4.6.4" + } +} diff --git a/apps/backtest-report/pages/_app.tsx b/apps/backtest-report/pages/_app.tsx new file mode 100644 index 0000000000..300e8a516d --- /dev/null +++ b/apps/backtest-report/pages/_app.tsx @@ -0,0 +1,26 @@ +import '../styles/globals.css' +import type {AppProps} from 'next/app' +import Head from 'next/head'; + +import { MantineProvider } from '@mantine/core'; + +function MyApp({Component, pageProps}: AppProps) { + return <> + + BBGO Back-test Report + + + + + + +} + +export default MyApp diff --git a/apps/backtest-report/pages/_document.tsx b/apps/backtest-report/pages/_document.tsx new file mode 100644 index 0000000000..93210acd4f --- /dev/null +++ 
b/apps/backtest-report/pages/_document.tsx @@ -0,0 +1,44 @@ +import Document, {DocumentContext, Head, Html, Main, NextScript} from 'next/document'; + +// ----- mantine setup +import {createStylesServer, ServerStyles} from '@mantine/next'; +import {DocumentInitialProps} from "next/dist/shared/lib/utils"; + +// const getInitialProps = createGetInitialProps(); +const stylesServer = createStylesServer(); +// ----- + +class MyDocument extends Document { + // this is for mantine + // static getInitialProps = getInitialProps; + + static async getInitialProps(ctx: DocumentContext): Promise { + const initialProps = await Document.getInitialProps(ctx); + + return { + ...initialProps, + + // use bracket [] instead of () to fix the type error + styles: [ + <> + {initialProps.styles} + + + ], + }; + } + + render() { + return ( + + + +
+ + + + ); + } +} + +export default MyDocument; diff --git a/apps/backtest-report/pages/api/hello.ts b/apps/backtest-report/pages/api/hello.ts new file mode 100644 index 0000000000..f8bcc7e5ca --- /dev/null +++ b/apps/backtest-report/pages/api/hello.ts @@ -0,0 +1,13 @@ +// Next.js API route support: https://nextjs.org/docs/api-routes/introduction +import type { NextApiRequest, NextApiResponse } from 'next' + +type Data = { + name: string +} + +export default function handler( + req: NextApiRequest, + res: NextApiResponse +) { + res.status(200).json({ name: 'John Doe' }) +} diff --git a/apps/backtest-report/pages/index.tsx b/apps/backtest-report/pages/index.tsx new file mode 100644 index 0000000000..7858995cfe --- /dev/null +++ b/apps/backtest-report/pages/index.tsx @@ -0,0 +1,51 @@ +import type {NextPage} from 'next' +import Head from 'next/head' +import styles from '../styles/Home.module.css' +import { useRouter } from "next/router"; +import {AppShell, Header, Navbar, Text} from '@mantine/core'; + +import ReportDetails from '../components/ReportDetails'; +import ReportNavigator from '../components/ReportNavigator'; +import {useEffect, useState} from "react"; + +const Home: NextPage = () => { + const [currentReport, setCurrentReport] = useState(); + const { query } = useRouter(); + const basePath = query.basePath ? query.basePath as string : '/output'; + + return ( +
+ + BBGO Back-Test Report + + + +
+ + + { + setCurrentReport(reportEntry) + }}/> + + } + header={ +
+ BBGO Back-Test Report +
+ } + styles={(theme) => ({ + main: {backgroundColor: theme.colorScheme === 'dark' ? theme.colors.dark[8] : theme.colors.gray[0]}, + })} + > + { + currentReport ? : null + } +
+
+
+ ) +} + +export default Home diff --git a/apps/backtest-report/public/favicon.ico b/apps/backtest-report/public/favicon.ico new file mode 100644 index 0000000000..718d6fea48 Binary files /dev/null and b/apps/backtest-report/public/favicon.ico differ diff --git a/apps/backtest-report/public/vercel.svg b/apps/backtest-report/public/vercel.svg new file mode 100644 index 0000000000..fbf0e25a65 --- /dev/null +++ b/apps/backtest-report/public/vercel.svg @@ -0,0 +1,4 @@ + + + \ No newline at end of file diff --git a/apps/backtest-report/src/Chart.js b/apps/backtest-report/src/Chart.js new file mode 100644 index 0000000000..628261cebd --- /dev/null +++ b/apps/backtest-report/src/Chart.js @@ -0,0 +1,131 @@ + +import { format } from "d3-format"; +import { timeFormat } from "d3-time-format"; + +import { ChartCanvas, Chart } from "react-stockcharts"; +import { + CandlestickSeries, + LineSeries, +} from "react-stockcharts/lib/series"; +import { XAxis, YAxis } from "react-stockcharts/lib/axes"; +import { + CrossHairCursor, + EdgeIndicator, + CurrentCoordinate, + MouseCoordinateX, + MouseCoordinateY, +} from "react-stockcharts/lib/coordinates"; + +import { LabelAnnotation, Label, Annotate } from "react-stockcharts/lib/annotation"; +import { discontinuousTimeScaleProvider } from "react-stockcharts/lib/scale"; +import { OHLCTooltip, MovingAverageTooltip } from "react-stockcharts/lib/tooltip"; +import { ema } from "react-stockcharts/lib/indicator"; +// import { fitWidth } from "react-stockcharts/lib/helper"; +import { last } from "react-stockcharts/lib/utils"; + +let CandleStickChartWithAnnotation = function(props) { + const annotationProps = { + fontFamily: "Glyphicons Halflings", + fontSize: 20, + fill: "#060F8F", + opacity: 0.8, + text: "\ue182", + y: ({ yScale }) => yScale.range()[0], + onClick: console.log.bind(console), + tooltip: d => timeFormat("%B")(d.date), + // onMouseOver: console.log.bind(console), + }; + + const margin = { left: 80, right: 80, top: 30, bottom: 50 }; + const height = 400; + const { type, data: initialData, width, ratio } = props; + + const [yAxisLabelX, yAxisLabelY] = [ + width - margin.left - 40, + (height - margin.top - margin.bottom) / 2 + ]; + + const xScaleProvider = discontinuousTimeScaleProvider + .inputDateAccessor(d => d.date); + const { + data, + xScale, + xAccessor, + displayXAccessor, + } = xScaleProvider(initialData); + + const start = xAccessor(last(data)); + const end = xAccessor(data[Math.max(0, data.length - 150)]); + const xExtents = [start, end]; + + return ( + + + + ); +} + +/* +CandleStickChartWithAnnotation.propTypes = { + data: PropTypes.array.isRequired, + width: PropTypes.number.isRequired, + ratio: PropTypes.number.isRequired, + type: PropTypes.oneOf(["svg", "hybrid"]).isRequired, +}; + +CandleStickChartWithAnnotation.defaultProps = { + type: "svg", +}; +*/ + +// CandleStickChartWithAnnotation = fitWidth(CandleStickChartWithAnnotation); + +export default CandleStickChartWithAnnotation; diff --git a/apps/backtest-report/src/utils.js b/apps/backtest-report/src/utils.js new file mode 100644 index 0000000000..bee7ef34f6 --- /dev/null +++ b/apps/backtest-report/src/utils.js @@ -0,0 +1,3 @@ +import { tsvParse, csvParse } from "d3-dsv"; +import { timeParse } from "d3-time-format"; + diff --git a/apps/backtest-report/styles/Home.module.css b/apps/backtest-report/styles/Home.module.css new file mode 100644 index 0000000000..8105a23794 --- /dev/null +++ b/apps/backtest-report/styles/Home.module.css @@ -0,0 +1,78 @@ +.main { + min-height: 100vh; +} + +.footer { 
+ display: flex; + flex: 1; + padding: 2rem 0; + border-top: 1px solid #eaeaea; + justify-content: center; + align-items: center; +} + +.footer a { + display: flex; + justify-content: center; + align-items: center; + flex-grow: 1; +} + +.code { + background: #fafafa; + border-radius: 5px; + padding: 0.75rem; + font-size: 1.1rem; + font-family: Menlo, Monaco, Lucida Console, Liberation Mono, DejaVu Sans Mono, + Bitstream Vera Sans Mono, Courier New, monospace; +} + +.grid { + display: flex; + align-items: center; + justify-content: center; + flex-wrap: wrap; + max-width: 800px; +} + +.card { + margin: 1rem; + padding: 1.5rem; + text-align: left; + color: inherit; + text-decoration: none; + border: 1px solid #eaeaea; + border-radius: 10px; + transition: color 0.15s ease, border-color 0.15s ease; + max-width: 300px; +} + +.card:hover, +.card:focus, +.card:active { + color: #0070f3; + border-color: #0070f3; +} + +.card h2 { + margin: 0 0 1rem 0; + font-size: 1.5rem; +} + +.card p { + margin: 0; + font-size: 1.25rem; + line-height: 1.5; +} + +.logo { + height: 1em; + margin-left: 0.5rem; +} + +@media (max-width: 600px) { + .grid { + width: 100%; + flex-direction: column; + } +} diff --git a/apps/backtest-report/styles/globals.css b/apps/backtest-report/styles/globals.css new file mode 100644 index 0000000000..e5e2dcc23b --- /dev/null +++ b/apps/backtest-report/styles/globals.css @@ -0,0 +1,16 @@ +html, +body { + padding: 0; + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen, + Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif; +} + +a { + color: inherit; + text-decoration: none; +} + +* { + box-sizing: border-box; +} diff --git a/apps/backtest-report/tsconfig.json b/apps/backtest-report/tsconfig.json new file mode 100644 index 0000000000..99710e8578 --- /dev/null +++ b/apps/backtest-report/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "target": "es5", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "node", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"], + "exclude": ["node_modules"] +} diff --git a/apps/backtest-report/types/index.ts b/apps/backtest-report/types/index.ts new file mode 100644 index 0000000000..ee9c9f7545 --- /dev/null +++ b/apps/backtest-report/types/index.ts @@ -0,0 +1 @@ +export * from './report'; diff --git a/apps/backtest-report/types/market.ts b/apps/backtest-report/types/market.ts new file mode 100644 index 0000000000..6b1d2534d3 --- /dev/null +++ b/apps/backtest-report/types/market.ts @@ -0,0 +1,16 @@ +export interface Market { + symbol: string; + localSymbol: string; + pricePrecision: number; + volumePrecision: number; + quoteCurrency: string; + baseCurrency: string; + minNotional: number; + minAmount: number; + minQuantity: number; + maxQuantity: number; + stepSize: number; + minPrice: number; + maxPrice: number; + tickSize: number; +} diff --git a/apps/backtest-report/types/report.ts b/apps/backtest-report/types/report.ts new file mode 100644 index 0000000000..5a0f516470 --- /dev/null +++ b/apps/backtest-report/types/report.ts @@ -0,0 +1,76 @@ +import {Market} from './market'; + +export interface ReportEntry { + id: string; + + config: object; + + time: string; +} + +export interface ReportIndex { + runs: 
Array; +} + +export interface Balance { + currency: string; + available: number; + locked: number; + borrowed: number; +} + +export interface BalanceMap { + [currency: string]: Balance; +} + +export interface ReportSummary { + startTime: Date; + endTime: Date; + sessions: string[]; + symbols: string[]; + intervals: string[]; + initialTotalBalances: BalanceMap; + finalTotalBalances: BalanceMap; + symbolReports: SymbolReport[]; + manifests: Manifest[]; +} + +export interface SymbolReport { + exchange: string; + symbol: string; + market: Market; + lastPrice: number; + startPrice: number; + pnl: PnL; + initialBalances: BalanceMap; + finalBalances: BalanceMap; +} + + +export interface Manifest { + type: string; + filename: string; + strategyID: string; + strategyInstance: string; + strategyProperty: string; +} + +export interface CurrencyFeeMap { + [currency: string]: number; +} + +export interface PnL { + lastPrice: number; + startTime: Date; + symbol: string; + market: Market; + numTrades: number; + profit: number; + netProfit: number; + unrealizedProfit: number; + averageCost: number; + buyVolume: number; + sellVolume: number; + feeInUSD: number; + currencyFees: CurrencyFeeMap; +} diff --git a/apps/backtest-report/yarn.lock b/apps/backtest-report/yarn.lock new file mode 100644 index 0000000000..6659a6e513 --- /dev/null +++ b/apps/backtest-report/yarn.lock @@ -0,0 +1,2361 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@babel/runtime-corejs3@^7.10.2": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.17.9.tgz#3d02d0161f0fbf3ada8e88159375af97690f4055" + integrity sha512-WxYHHUWF2uZ7Hp1K+D1xQgbgkGUfA+5UPOegEXGt2Y5SMog/rYCVaifLZDbw8UkNXozEqqrZTy6bglL7xTaCOw== + dependencies: + core-js-pure "^3.20.2" + regenerator-runtime "^0.13.4" + +"@babel/runtime@^7.10.2", "@babel/runtime@^7.13.10", "@babel/runtime@^7.16.3": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.17.9.tgz#d19fbf802d01a8cb6cf053a64e472d42c434ba72" + integrity sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg== + dependencies: + regenerator-runtime "^0.13.4" + +"@emotion/cache@11.7.1", "@emotion/cache@^11.7.1": + version "11.7.1" + resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.7.1.tgz#08d080e396a42e0037848214e8aa7bf879065539" + integrity sha512-r65Zy4Iljb8oyjtLeCuBH8Qjiy107dOYC6SJq7g7GV5UCQWMObY4SJDPGFjiiVpPrOJ2hmJOoBiYTC7hwx9E2A== + dependencies: + "@emotion/memoize" "^0.7.4" + "@emotion/sheet" "^1.1.0" + "@emotion/utils" "^1.0.0" + "@emotion/weak-memoize" "^0.2.5" + stylis "4.0.13" + +"@emotion/hash@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413" + integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow== + +"@emotion/memoize@^0.7.4": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.5.tgz#2c40f81449a4e554e9fc6396910ed4843ec2be50" + integrity sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ== + +"@emotion/react@11.7.1": + version "11.7.1" + resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.7.1.tgz#3f800ce9b20317c13e77b8489ac4a0b922b2fe07" + integrity sha512-DV2Xe3yhkF1yT4uAUoJcYL1AmrnO5SVsdfvu+fBuS7IbByDeTVx9+wFmvx9Idzv7/78+9Mgx2Hcmr7Fex3tIyw== + dependencies: + "@babel/runtime" "^7.13.10" + 
"@emotion/cache" "^11.7.1" + "@emotion/serialize" "^1.0.2" + "@emotion/sheet" "^1.1.0" + "@emotion/utils" "^1.0.0" + "@emotion/weak-memoize" "^0.2.5" + hoist-non-react-statics "^3.3.1" + +"@emotion/serialize@1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.0.2.tgz#77cb21a0571c9f68eb66087754a65fa97bfcd965" + integrity sha512-95MgNJ9+/ajxU7QIAruiOAdYNjxZX7G2mhgrtDWswA21VviYIRP1R5QilZ/bDY42xiKsaktP4egJb3QdYQZi1A== + dependencies: + "@emotion/hash" "^0.8.0" + "@emotion/memoize" "^0.7.4" + "@emotion/unitless" "^0.7.5" + "@emotion/utils" "^1.0.0" + csstype "^3.0.2" + +"@emotion/serialize@^1.0.2": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.0.3.tgz#99e2060c26c6292469fb30db41f4690e1c8fea63" + integrity sha512-2mSSvgLfyV3q+iVh3YWgNlUc2a9ZlDU7DjuP5MjK3AXRR0dYigCrP99aeFtaB2L/hjfEZdSThn5dsZ0ufqbvsA== + dependencies: + "@emotion/hash" "^0.8.0" + "@emotion/memoize" "^0.7.4" + "@emotion/unitless" "^0.7.5" + "@emotion/utils" "^1.0.0" + csstype "^3.0.2" + +"@emotion/server@11.4.0": + version "11.4.0" + resolved "https://registry.yarnpkg.com/@emotion/server/-/server-11.4.0.tgz#3ae1d74cb31c7d013c3c76e88c0c4439076e9f66" + integrity sha512-IHovdWA3V0DokzxLtUNDx4+hQI82zUXqQFcVz/om2t44O0YSc+NHB+qifnyAOoQwt3SXcBTgaSntobwUI9gnfA== + dependencies: + "@emotion/utils" "^1.0.0" + html-tokenize "^2.0.0" + multipipe "^1.0.2" + through "^2.3.8" + +"@emotion/sheet@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.1.0.tgz#56d99c41f0a1cda2726a05aa6a20afd4c63e58d2" + integrity sha512-u0AX4aSo25sMAygCuQTzS+HsImZFuS8llY8O7b9MDRzbJM0kVJlAz6KNDqcG7pOuQZJmj/8X/rAW+66kMnMW+g== + +"@emotion/unitless@^0.7.5": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed" + integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== + +"@emotion/utils@1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.0.0.tgz#abe06a83160b10570816c913990245813a2fd6af" + integrity sha512-mQC2b3XLDs6QCW+pDQDiyO/EdGZYOygE8s5N5rrzjSI4M3IejPE/JPndCBwRT9z982aqQNi6beWs1UeayrQxxA== + +"@emotion/utils@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.1.0.tgz#86b0b297f3f1a0f2bdb08eeac9a2f49afd40d0cf" + integrity sha512-iRLa/Y4Rs5H/f2nimczYmS5kFJEbpiVvgN3XVfZ022IYhuNA1IRSHEizcof88LtCTXtl9S2Cxt32KgaXEu72JQ== + +"@emotion/weak-memoize@^0.2.5": + version "0.2.5" + resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46" + integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA== + +"@eslint/eslintrc@^1.2.2": + version "1.2.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.2.3.tgz#fcaa2bcef39e13d6e9e7f6271f4cc7cae1174886" + integrity sha512-uGo44hIwoLGNyduRpjdEpovcbMdd+Nv7amtmJxnKmI8xj6yd5LncmSwDa5NgX/41lIFJtkjD6YdVfgEzPfJ5UA== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.3.2" + globals "^13.9.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@humanwhocodes/config-array@^0.9.2": + version "0.9.5" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7" + integrity 
sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@mantine/core@^4.2.5": + version "4.2.5" + resolved "https://registry.yarnpkg.com/@mantine/core/-/core-4.2.5.tgz#cc139992b3980043fce17db50e09e25dab261fe0" + integrity sha512-A/mixxpmCsA9acYVrCAurHrj/ikyLGksPqFILPJJpjC4uUDm85HHqVUa209VVLAKlvr26tX6CzDVqUeSe6s3Zg== + dependencies: + "@mantine/styles" "4.2.5" + "@popperjs/core" "^2.9.3" + "@radix-ui/react-scroll-area" "^0.1.1" + react-popper "^2.2.5" + react-textarea-autosize "^8.3.2" + +"@mantine/hooks@^4.2.5": + version "4.2.5" + resolved "https://registry.yarnpkg.com/@mantine/hooks/-/hooks-4.2.5.tgz#b1c964e6f59b5d86fb579a43daea158a7ec9d6aa" + integrity sha512-CEpdUXPAC28rXosgo/Wxvs3ch9qC+QYfqh4AFkOH0+EKdlXkD1xRN8vv6pd5AHJtRZvGf7CZDuGKSjWgssvgeA== + +"@mantine/next@^4.2.5": + version "4.2.5" + resolved "https://registry.yarnpkg.com/@mantine/next/-/next-4.2.5.tgz#d1c5bdf99bfde7e1d1da1f8978bf5ec355800e6e" + integrity sha512-3hwe8pzKmIvdfG8rRlPMzd3mbg+I/U26axzLeE8gtK/OH+0PmYAq1Zle99aHDAbwPi5vFyLaHwGLXFHspdrjfA== + dependencies: + "@mantine/ssr" "4.2.5" + +"@mantine/ssr@4.2.5": + version "4.2.5" + resolved "https://registry.yarnpkg.com/@mantine/ssr/-/ssr-4.2.5.tgz#745c0b20e255bc29e9a67985a25c89668d1ed1ba" + integrity sha512-3Nt5PjCSKylAeyg1j6yct5hoX45J0jvxCwblphnhcYL+YVdNdaI2KL5Dg+KE/wcIAzOATquidUCFSHUwqcUVkg== + dependencies: + "@emotion/cache" "11.7.1" + "@emotion/react" "11.7.1" + "@emotion/serialize" "1.0.2" + "@emotion/server" "11.4.0" + "@emotion/utils" "1.0.0" + "@mantine/styles" "4.2.5" + csstype "3.0.9" + html-react-parser "1.3.0" + +"@mantine/styles@4.2.5": + version "4.2.5" + resolved "https://registry.yarnpkg.com/@mantine/styles/-/styles-4.2.5.tgz#a8e5bd6f601dee63aeedc5f09f2098dd1473d2da" + integrity sha512-A6sIm3+Aa4ZqpaIqpmiaCmkAJI4ow9vwmCgpvuwWYCspBhWTWQWtdpnNyWfnTjszYY1uSnx9mb50JlIsglgLNQ== + dependencies: + "@emotion/cache" "11.7.1" + "@emotion/react" "11.7.1" + "@emotion/serialize" "1.0.2" + "@emotion/utils" "1.0.0" + clsx "^1.1.1" + csstype "3.0.9" + +"@next/env@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/env/-/env-12.1.6.tgz#5f44823a78335355f00f1687cfc4f1dafa3eca08" + integrity sha512-Te/OBDXFSodPU6jlXYPAXpmZr/AkG6DCATAxttQxqOWaq6eDFX25Db3dK0120GZrSZmv4QCe9KsZmJKDbWs4OA== + +"@next/eslint-plugin-next@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/eslint-plugin-next/-/eslint-plugin-next-12.1.6.tgz#dde3f98831f15923b25244588d924c716956292e" + integrity sha512-yNUtJ90NEiYFT6TJnNyofKMPYqirKDwpahcbxBgSIuABwYOdkGwzos1ZkYD51Qf0diYwpQZBeVqElTk7Q2WNqw== + dependencies: + glob "7.1.7" + +"@next/swc-android-arm-eabi@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-12.1.6.tgz#79a35349b98f2f8c038ab6261aa9cd0d121c03f9" + integrity sha512-BxBr3QAAAXWgk/K7EedvzxJr2dE014mghBSA9iOEAv0bMgF+MRq4PoASjuHi15M2zfowpcRG8XQhMFtxftCleQ== + +"@next/swc-android-arm64@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-android-arm64/-/swc-android-arm64-12.1.6.tgz#ec08ea61794f8752c8ebcacbed0aafc5b9407456" + 
integrity sha512-EboEk3ROYY7U6WA2RrMt/cXXMokUTXXfnxe2+CU+DOahvbrO8QSWhlBl9I9ZbFzJx28AGB9Yo3oQHCvph/4Lew== + +"@next/swc-darwin-arm64@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.1.6.tgz#d1053805615fd0706e9b1667893a72271cd87119" + integrity sha512-P0EXU12BMSdNj1F7vdkP/VrYDuCNwBExtRPDYawgSUakzi6qP0iKJpya2BuLvNzXx+XPU49GFuDC5X+SvY0mOw== + +"@next/swc-darwin-x64@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-12.1.6.tgz#2d1b926a22f4c5230d5b311f9c56cfdcc406afec" + integrity sha512-9FptMnbgHJK3dRDzfTpexs9S2hGpzOQxSQbe8omz6Pcl7rnEp9x4uSEKY51ho85JCjL4d0tDLBcXEJZKKLzxNg== + +"@next/swc-linux-arm-gnueabihf@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.1.6.tgz#c021918d2a94a17f823106a5e069335b8a19724f" + integrity sha512-PvfEa1RR55dsik/IDkCKSFkk6ODNGJqPY3ysVUZqmnWMDSuqFtf7BPWHFa/53znpvVB5XaJ5Z1/6aR5CTIqxPw== + +"@next/swc-linux-arm64-gnu@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.1.6.tgz#ac55c07bfabde378dfa0ce2b8fc1c3b2897e81ae" + integrity sha512-53QOvX1jBbC2ctnmWHyRhMajGq7QZfl974WYlwclXarVV418X7ed7o/EzGY+YVAEKzIVaAB9JFFWGXn8WWo0gQ== + +"@next/swc-linux-arm64-musl@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.1.6.tgz#e429f826279894be9096be6bec13e75e3d6bd671" + integrity sha512-CMWAkYqfGdQCS+uuMA1A2UhOfcUYeoqnTW7msLr2RyYAys15pD960hlDfq7QAi8BCAKk0sQ2rjsl0iqMyziohQ== + +"@next/swc-linux-x64-gnu@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.1.6.tgz#1f276c0784a5ca599bfa34b2fcc0b38f3a738e08" + integrity sha512-AC7jE4Fxpn0s3ujngClIDTiEM/CQiB2N2vkcyWWn6734AmGT03Duq6RYtPMymFobDdAtZGFZd5nR95WjPzbZAQ== + +"@next/swc-linux-x64-musl@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.1.6.tgz#1d9933dd6ba303dcfd8a2acd6ac7c27ed41e2eea" + integrity sha512-c9Vjmi0EVk0Kou2qbrynskVarnFwfYIi+wKufR9Ad7/IKKuP6aEhOdZiIIdKsYWRtK2IWRF3h3YmdnEa2WLUag== + +"@next/swc-win32-arm64-msvc@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.1.6.tgz#2ef9837f12ca652b1783d72ecb86208906042f02" + integrity sha512-3UTOL/5XZSKFelM7qN0it35o3Cegm6LsyuERR3/OoqEExyj3aCk7F025b54/707HTMAnjlvQK3DzLhPu/xxO4g== + +"@next/swc-win32-ia32-msvc@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.1.6.tgz#74003d0aa1c59dfa56cb15481a5c607cbc0027b9" + integrity sha512-8ZWoj6nCq6fI1yCzKq6oK0jE6Mxlz4MrEsRyu0TwDztWQWe7rh4XXGLAa2YVPatYcHhMcUL+fQQbqd1MsgaSDA== + +"@next/swc-win32-x64-msvc@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.1.6.tgz#a350caf42975e7197b24b495b8d764eec7e6a36e" + integrity sha512-4ZEwiRuZEicXhXqmhw3+de8Z4EpOLQj/gp+D9fFWo6ii6W1kBkNNvvEx4A90ugppu+74pT1lIJnOuz3A9oQeJA== + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", 
"@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@popperjs/core@^2.9.3": + version "2.11.5" + resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.11.5.tgz#db5a11bf66bdab39569719555b0f76e138d7bd64" + integrity sha512-9X2obfABZuDVLCgPK9aX0a/x4jaOEweTTWE2+9sr0Qqqevj2Uv5XorvusThmc9XGYpS9yI+fhh8RTafBtGposw== + +"@radix-ui/number@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@radix-ui/number/-/number-0.1.0.tgz#73ad13d5cc5f75fa5e147d72e5d5d5e50d688256" + integrity sha512-rpf6QiOWLHAkM4FEMYu9i+5Jr8cKT893+R4mPpcdsy4LD7omr9JfdOqj/h/xPA5+EcVrpMMlU6rrRYpUB5UI8g== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/primitive@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@radix-ui/primitive/-/primitive-0.1.0.tgz#6206b97d379994f0d1929809db035733b337e543" + integrity sha512-tqxZKybwN5Fa3VzZry4G6mXAAb9aAqKmPtnVbZpL0vsBwvOHTBwsjHVPXylocYLwEtBY9SCe665bYnNB515uoA== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-compose-refs@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-0.1.0.tgz#cff6e780a0f73778b976acff2c2a5b6551caab95" + integrity sha512-eyclbh+b77k+69Dk72q3694OHrn9B3QsoIRx7ywX341U9RK1ThgQjMFZoPtmZNQTksXHLNEiefR8hGVeFyInGg== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-context@0.1.1": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-context/-/react-context-0.1.1.tgz#06996829ea124d9a1bc1dbe3e51f33588fab0875" + integrity sha512-PkyVX1JsLBioeu0jB9WvRpDBBLtLZohVDT3BB5CTSJqActma8S8030P57mWZb4baZifMvN7KKWPAA40UmWKkQg== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-presence@0.1.2": + version "0.1.2" + resolved "https://registry.yarnpkg.com/@radix-ui/react-presence/-/react-presence-0.1.2.tgz#9f11cce3df73cf65bc348e8b76d891f0d54c1fe3" + integrity sha512-3BRlFZraooIUfRlyN+b/Xs5hq1lanOOo/+3h6Pwu2GMFjkGKKa4Rd51fcqGqnVlbr3jYg+WLuGyAV4KlgqwrQw== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-compose-refs" "0.1.0" + "@radix-ui/react-use-layout-effect" "0.1.0" + +"@radix-ui/react-primitive@0.1.4": + version "0.1.4" + resolved "https://registry.yarnpkg.com/@radix-ui/react-primitive/-/react-primitive-0.1.4.tgz#6c233cf08b0cb87fecd107e9efecb3f21861edc1" + integrity sha512-6gSl2IidySupIMJFjYnDIkIWRyQdbu/AHK7rbICPani+LW4b0XdxBXc46og/iZvuwW8pjCS8I2SadIerv84xYA== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-slot" "0.1.2" + +"@radix-ui/react-scroll-area@^0.1.1": + version "0.1.4" + resolved "https://registry.yarnpkg.com/@radix-ui/react-scroll-area/-/react-scroll-area-0.1.4.tgz#be1d32c113ee9f64e3d2e7ee3983d98f00b42038" + integrity sha512-QHxRsjy+hsHwQYJ9cCNgSJ5+6ioZu1KhwD1UOXoHNciuFGMX08v+uJPKXIz+ySv03Rx6cOz6f/Fk5aPHRMFi/A== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/number" "0.1.0" + "@radix-ui/primitive" "0.1.0" + "@radix-ui/react-compose-refs" "0.1.0" + "@radix-ui/react-context" "0.1.1" + 
"@radix-ui/react-presence" "0.1.2" + "@radix-ui/react-primitive" "0.1.4" + "@radix-ui/react-use-callback-ref" "0.1.0" + "@radix-ui/react-use-direction" "0.1.0" + "@radix-ui/react-use-layout-effect" "0.1.0" + +"@radix-ui/react-slot@0.1.2": + version "0.1.2" + resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-0.1.2.tgz#e6f7ad9caa8ce81cc8d532c854c56f9b8b6307c8" + integrity sha512-ADkqfL+agEzEguU3yS26jfB50hRrwf7U4VTwAOZEmi/g+ITcBWe12yM46ueS/UCIMI9Py+gFUaAdxgxafFvY2Q== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-compose-refs" "0.1.0" + +"@radix-ui/react-use-callback-ref@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-0.1.0.tgz#934b6e123330f5b3a6b116460e6662cbc663493f" + integrity sha512-Va041McOFFl+aV+sejvl0BS2aeHx86ND9X/rVFmEFQKTXCp6xgUK0NGUAGcgBlIjnJSbMYPGEk1xKSSlVcN2Aw== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-use-direction@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-direction/-/react-use-direction-0.1.0.tgz#97ac1d52e497c974389e7988f809238ed72e7df7" + integrity sha512-NajpY/An9TCPSfOVkgWIdXJV+VuWl67PxB6kOKYmtNAFHvObzIoh8o0n9sAuwSAyFCZVq211FEf9gvVDRhOyiA== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-use-layout-effect@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-0.1.0.tgz#ebf71bd6d2825de8f1fbb984abf2293823f0f223" + integrity sha512-+wdeS51Y+E1q1Wmd+1xSSbesZkpVj4jsg0BojCbopWvgq5iBvixw5vgemscdh58ep98BwUbsFYnrywFhV9yrVg== + dependencies: + "@babel/runtime" "^7.13.10" + +"@rushstack/eslint-patch@^1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.1.3.tgz#6801033be7ff87a6b7cadaf5b337c9f366a3c4b0" + integrity sha512-WiBSI6JBIhC6LRIsB2Kwh8DsGTlbBU+mLRxJmAe3LjHTdkDpwIbEOZgoXBbZilk/vlfjK8i6nKRAvIRn1XaIMw== + +"@types/d3-dsv@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-dsv/-/d3-dsv-3.0.0.tgz#f3c61fb117bd493ec0e814856feb804a14cfc311" + integrity sha512-o0/7RlMl9p5n6FQDptuJVMxDf/7EDEv2SYEO/CwdG2tr1hTfUVi0Iavkk2ax+VpaQ/1jVhpnj5rq1nj8vwhn2A== + +"@types/d3-format@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-format/-/d3-format-3.0.1.tgz#194f1317a499edd7e58766f96735bdc0216bb89d" + integrity sha512-5KY70ifCCzorkLuIkDe0Z9YTf9RR2CjBX1iaJG+rgM/cPP+sO+q9YdQ9WdhQcgPj1EQiJ2/0+yUkkziTG6Lubg== + +"@types/d3-time-format@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-time-format/-/d3-time-format-4.0.0.tgz#ee7b6e798f8deb2d9640675f8811d0253aaa1946" + integrity sha512-yjfBUe6DJBsDin2BMIulhSHmr5qNR5Pxs17+oW4DoVPyVIXZ+m6bs7j1UVKP08Emv6jRmYrYqxYzO63mQxy1rw== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= + +"@types/node@17.0.31": + version "17.0.31" + resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.31.tgz#a5bb84ecfa27eec5e1c802c6bbf8139bdb163a5d" + integrity sha512-AR0x5HbXGqkEx9CadRH3EBYx/VkiUgZIhP4wvPn/+5KIsgpNoyFaRlVe0Zlx9gRtg8fA06a9tskE2MSN7TcG4Q== + +"@types/prop-types@*": + version "15.7.5" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity 
sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/react-dom@18.0.3": + version "18.0.3" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.0.3.tgz#a022ea08c75a476fe5e96b675c3e673363853831" + integrity sha512-1RRW9kst+67gveJRYPxGmVy8eVJ05O43hg77G2j5m76/RFJtMbcfAs2viQ2UNsvvDg8F7OfQZx8qQcl6ymygaQ== + dependencies: + "@types/react" "*" + +"@types/react@*": + version "18.0.9" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.9.tgz#d6712a38bd6cd83469603e7359511126f122e878" + integrity sha512-9bjbg1hJHUm4De19L1cHiW0Jvx3geel6Qczhjd0qY5VKVE2X5+x77YxAepuCwVh4vrgZJdgEJw48zrhRIeF4Nw== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/react@18.0.8": + version "18.0.8" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.8.tgz#a051eb380a9fbcaa404550543c58e1cf5ce4ab87" + integrity sha512-+j2hk9BzCOrrOSJASi5XiOyBbERk9jG5O73Ya4M0env5Ixi6vUNli4qy994AINcEF+1IEHISYFfIT4zwr++LKw== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@typescript-eslint/parser@^5.21.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.23.0.tgz#443778e1afc9a8ff180f91b5e260ac3bec5e2de1" + integrity sha512-V06cYUkqcGqpFjb8ttVgzNF53tgbB/KoQT/iB++DOIExKmzI9vBJKjZKt/6FuV9c+zrDsvJKbJ2DOCYwX91cbw== + dependencies: + "@typescript-eslint/scope-manager" "5.23.0" + "@typescript-eslint/types" "5.23.0" + "@typescript-eslint/typescript-estree" "5.23.0" + debug "^4.3.2" + +"@typescript-eslint/scope-manager@5.23.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.23.0.tgz#4305e61c2c8e3cfa3787d30f54e79430cc17ce1b" + integrity sha512-EhjaFELQHCRb5wTwlGsNMvzK9b8Oco4aYNleeDlNuL6qXWDF47ch4EhVNPh8Rdhf9tmqbN4sWDk/8g+Z/J8JVw== + dependencies: + "@typescript-eslint/types" "5.23.0" + "@typescript-eslint/visitor-keys" "5.23.0" + +"@typescript-eslint/types@5.23.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.23.0.tgz#8733de0f58ae0ed318dbdd8f09868cdbf9f9ad09" + integrity sha512-NfBsV/h4dir/8mJwdZz7JFibaKC3E/QdeMEDJhiAE3/eMkoniZ7MjbEMCGXw6MZnZDMN3G9S0mH/6WUIj91dmw== + +"@typescript-eslint/typescript-estree@5.23.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.23.0.tgz#dca5f10a0a85226db0796e8ad86addc9aee52065" + integrity sha512-xE9e0lrHhI647SlGMl+m+3E3CKPF1wzvvOEWnuE3CCjjT7UiRnDGJxmAcVKJIlFgK6DY9RB98eLr1OPigPEOGg== + dependencies: + "@typescript-eslint/types" "5.23.0" + "@typescript-eslint/visitor-keys" "5.23.0" + debug "^4.3.2" + globby "^11.0.4" + is-glob "^4.0.3" + semver "^7.3.5" + tsutils "^3.21.0" + +"@typescript-eslint/visitor-keys@5.23.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.23.0.tgz#057c60a7ca64667a39f991473059377a8067c87b" + integrity sha512-Vd4mFNchU62sJB8pX19ZSPog05B0Y0CE2UxAZPT5k4iqhRYjPnqyY3woMxCd0++t9OTqkgjST+1ydLBi7e2Fvg== + dependencies: + "@typescript-eslint/types" "5.23.0" + eslint-visitor-keys "^3.0.0" + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved 
"https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn@^8.7.1: + version "8.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" + integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== + +ajv@^6.10.0, ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +aria-query@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" + integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== + dependencies: + "@babel/runtime" "^7.10.2" + "@babel/runtime-corejs3" "^7.10.2" + +array-includes@^3.1.4: + version "3.1.5" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.2.5: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +array.prototype.flatmap@^1.2.5: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" + integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" 
+ +ast-types-flow@^0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= + +axe-core@^4.3.5: + version "4.4.2" + resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.4.2.tgz#dcf7fb6dea866166c3eab33d68208afe4d5f670c" + integrity sha512-LVAaGp/wkkgYJcjmHsoKx4juT1aQvJyPcW09MLCjVTh3V2cc6PnyempiLMNH5iMdfIX/zdbjUx2KDjMLCTdPeA== + +axobject-query@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" + integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +buffer-from@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-0.1.2.tgz#15f4b9bcef012044df31142c14333caf6e0260d0" + integrity sha512-RiWIenusJsmI2KcvqQABB83tLxCByE3upSP8QU3rJDMVFGPWLvPQJt/O1Su9moRWeH7d+Q2HYb68f6+v+tw2vg== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +caniuse-lite@^1.0.30001332: + version "1.0.30001340" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001340.tgz#029a2f8bfc025d4820fafbfaa6259fd7778340c7" + integrity sha512-jUNz+a9blQTQVu4uFcn17uAD8IDizPzQkIKh3LCJfg9BkyIqExYYdyc/ZSlWUSKb8iYiXxKsxbv4zYSvkqjrxw== + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +clsx@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.1.1.tgz#98b3134f9abbdf23b2663491ace13c5c03a73188" + integrity sha512-6/bPho624p3S2pMyvP5kKBPXnI3ufHLObBFCfgx+LkeR5lg2XYy2hqZqUf45ypD8COn2bhgGJSUE+l5dhNBieA== + +color-convert@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +commander@7: + version "7.2.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +core-js-pure@^3.20.2: + version "3.22.5" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.22.5.tgz#bdee0ed2f9b78f2862cda4338a07b13a49b6c9a9" + integrity sha512-8xo9R00iYD7TcV7OrC98GwxiUEAabVWO3dix+uyWjnYrx9fyASLlIX+f/3p5dW5qByaP2bcZ8X/T47s55et/tA== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +csstype@3.0.9: + version "3.0.9" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.9.tgz#6410af31b26bd0520933d02cbc64fce9ce3fbf0b" + integrity sha512-rpw6JPxK6Rfg1zLOYCSwle2GFOOsnjmDYDaBwEcwoOg4qlsIVCN789VkBZDJAGi4T07gI4YSutR43t9Zz4Lzuw== + +csstype@^3.0.2: + version "3.0.11" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.11.tgz#d66700c5eacfac1940deb4e3ee5642792d85cd33" + integrity sha512-sa6P2wJ+CAbgyy4KFssIb/JNMLxFvKF1pCYCSXS8ZMuqZnMsrxqI2E5sPyoTpxoPU/gVZMzr2zjOfg8GIZOMsw== + +"d3-array@2 - 3": + version "3.1.6" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.1.6.tgz#0342c835925826f49b4d16eb7027aec334ffc97d" + integrity sha512-DCbBBNuKOeiR9h04ySRBMW52TFVc91O9wJziuyXw6Ztmy8D3oZbmCkOO3UHKC7ceNJsN2Mavo9+vwV8EAEUXzA== + dependencies: + internmap "1 - 2" + +d3-dsv@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-3.0.1.tgz#c63af978f4d6a0d084a52a673922be2160789b73" + integrity sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q== + dependencies: + commander "7" + iconv-lite "0.6" + rw "1" + +d3-format@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641" + integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== + +d3-time-format@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a" + integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== + dependencies: + 
d3-time "1 - 3" + +"d3-time@1 - 3": + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.0.0.tgz#65972cb98ae2d4954ef5c932e8704061335d4975" + integrity sha512-zmV3lRnlaLI08y9IMRXSDshQb5Nj77smnfpnd2LrBa/2K281Jijactokeak14QacHs/kKq0AQ121nidNYlarbQ== + dependencies: + d3-array "2 - 3" + +damerau-levenshtein@^1.0.7: + version "1.0.8" + resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== + +debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +deep-is@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-serializer@^1.0.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + +domelementtype@^2.0.1, domelementtype@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + 
integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domhandler@4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.2.2.tgz#e825d721d19a86b8c201a35264e226c678ee755f" + integrity sha512-PzE9aBMsdZO8TK4BnuJwH0QT41wgMbRzuZrHUcpYncEjmQazq8QEaBWgLG7ZyC/DAZKEgglpIA6j4Qn/HmxS3w== + dependencies: + domelementtype "^2.2.0" + +domhandler@^4.0.0, domhandler@^4.2.0: + version "4.3.1" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== + dependencies: + domelementtype "^2.2.0" + +domutils@^2.5.2: + version "2.8.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + +duplexer2@^0.1.2: + version "0.1.4" + resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.1.4.tgz#8b12dab878c0d69e3e7891051662a32fc6bddcc1" + integrity sha1-ixLauHjA1p4+eJEFFmKjL8a93ME= + dependencies: + readable-stream "^2.0.2" + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +enhanced-resolve@^5.7.0: + version "5.9.3" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz#44a342c012cbc473254af5cc6ae20ebd0aae5d88" + integrity sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +entities@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + +es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5: + version "1.20.0" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.0.tgz#b2d526489cceca004588296334726329e0a6bfb6" + integrity sha512-URbD8tgRthKD3YcC39vbvSDrX23upXnPcnGAjQfgxXF5ID75YcENawc9ZX/9iTP9ptUyfCLIxTTuMYoRfiOVKA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.1" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.4" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + regexp.prototype.flags "^1.4.1" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +eslint-config-next@12.1.6: + version "12.1.6" + resolved "https://registry.yarnpkg.com/eslint-config-next/-/eslint-config-next-12.1.6.tgz#55097028982dce49159d8753000be3916ac55254" + integrity sha512-qoiS3g/EPzfCTkGkaPBSX9W0NGE/B1wNO3oWrd76QszVGrdpLggNqcO8+LR6MB0CNqtp9Q8NoeVrxNVbzM9hqA== + dependencies: + "@next/eslint-plugin-next" "12.1.6" + "@rushstack/eslint-patch" "^1.1.3" + "@typescript-eslint/parser" "^5.21.0" + eslint-import-resolver-node "^0.3.6" + eslint-import-resolver-typescript "^2.7.1" + eslint-plugin-import "^2.26.0" + eslint-plugin-jsx-a11y "^6.5.1" + eslint-plugin-react "^7.29.4" + eslint-plugin-react-hooks "^4.5.0" + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-import-resolver-typescript@^2.7.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.7.1.tgz#a90a4a1c80da8d632df25994c4c5fdcdd02b8751" + integrity sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ== + dependencies: + debug "^4.3.4" + glob "^7.2.0" + is-glob "^4.0.3" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-module-utils@^2.7.3: + version "2.7.3" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz#ad7e3a10552fdd0642e1e55292781bd6e34876ee" + integrity sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ== + dependencies: + debug "^3.2.7" + find-up "^2.1.0" + +eslint-plugin-import@^2.26.0: + version "2.26.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.3" + has "^1.0.3" + is-core-module "^2.8.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.5" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-plugin-jsx-a11y@^6.5.1: + version "6.5.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.5.1.tgz#cdbf2df901040ca140b6ec14715c988889c2a6d8" + integrity 
sha512-sVCFKX9fllURnXT2JwLN5Qgo24Ug5NF6dxhkmxsMEUZhXRcGg+X3e1JbJ84YePQKBl5E0ZjAH5Q4rkdcGY99+g== + dependencies: + "@babel/runtime" "^7.16.3" + aria-query "^4.2.2" + array-includes "^3.1.4" + ast-types-flow "^0.0.7" + axe-core "^4.3.5" + axobject-query "^2.2.0" + damerau-levenshtein "^1.0.7" + emoji-regex "^9.2.2" + has "^1.0.3" + jsx-ast-utils "^3.2.1" + language-tags "^1.0.5" + minimatch "^3.0.4" + +eslint-plugin-react-hooks@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.5.0.tgz#5f762dfedf8b2cf431c689f533c9d3fa5dcf25ad" + integrity sha512-8k1gRt7D7h03kd+SAAlzXkQwWK22BnK6GKZG+FJA6BAGy22CFvl8kCIXKpVux0cCxMWDQUPqSok0LKaZ0aOcCw== + +eslint-plugin-react@^7.29.4: + version "7.29.4" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.29.4.tgz#4717de5227f55f3801a5fd51a16a4fa22b5914d2" + integrity sha512-CVCXajliVh509PcZYRFyu/BoUEz452+jtQJq2b3Bae4v3xBUWPLCmtmBM+ZinG4MzwmxJgJ2M5rMqhqLVn7MtQ== + dependencies: + array-includes "^3.1.4" + array.prototype.flatmap "^1.2.5" + doctrine "^2.1.0" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.1.2" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.0" + object.values "^1.1.5" + prop-types "^15.8.1" + resolve "^2.0.0-next.3" + semver "^6.3.0" + string.prototype.matchall "^4.0.6" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.0.0, eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint@8.14.0: + version "8.14.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.14.0.tgz#62741f159d9eb4a79695b28ec4989fcdec623239" + integrity sha512-3/CE4aJX7LNEiE3i6FeodHmI/38GZtWCsAtsymScmzYapx8q1nVVb+eLcLSzATmCPXw5pT4TqVs1E0OmxAd9tw== + dependencies: + "@eslint/eslintrc" "^1.2.2" + "@humanwhocodes/config-array" "^0.9.2" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.3.1" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^6.0.1" + globals "^13.6.0" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn 
"^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^9.3.1, espree@^9.3.2: + version "9.3.2" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596" + integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA== + dependencies: + acorn "^8.7.1" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +fancy-canvas@0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/fancy-canvas/-/fancy-canvas-0.2.2.tgz#33fd4976724169a1eda5015f515a2a1302d1ec91" + integrity sha512-50qi8xA0QkHbjmb8h7XQ6k2fvD7y/yMfiUw9YTarJ7rWrq6o5/3CCXPouYk+XSLASvvxtjyiQLRBFt3qkE3oyA== + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.9: + version "3.2.11" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" + integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + 
+file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= + dependencies: + locate-path "^2.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.5" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.5.tgz#76c8584f4fc843db64702a6bd04ab7a8bd666da3" + integrity sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" + integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity 
sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob@7.1.7: + version "7.1.7" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" + integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^7.1.3, glob@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^13.6.0, globals@^13.9.0: + version "13.15.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.15.0.tgz#38113218c907d2f7e98658af246cef8b77e90bac" + integrity sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog== + dependencies: + type-fest "^0.20.2" + +globby@^11.0.4: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.2.4: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hoist-non-react-statics@^3.3.1: + version "3.3.2" + resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" + integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + dependencies: + react-is "^16.7.0" + +html-dom-parser@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/html-dom-parser/-/html-dom-parser-1.0.2.tgz#bb5ff844f214657d899aa4fb7b0a9e7d15607e96" + integrity sha512-Jq4oVkVSn+10ut3fyc2P/Fs1jqTo0l45cP6Q8d2ef/9jfkYwulO0QXmyLI0VUiZrXF4czpGgMEJRa52CQ6Fk8Q== + dependencies: + domhandler "4.2.2" + htmlparser2 "6.1.0" + +html-react-parser@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/html-react-parser/-/html-react-parser-1.3.0.tgz#81eff0558f34183ac1d372aa9218b8fb47bb3d6d" + integrity sha512-lhpkOFH8pwqEjlNUYCWvjT43/JVCZO9MAZuCS6afT1/VP+bZcNxNUs4AUqiMzH0QPSDHwM/GFNXZNok1KTA4BQ== + dependencies: + domhandler "4.2.2" + html-dom-parser "1.0.2" + react-property "2.0.0" + style-to-js "1.1.0" + +html-tokenize@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/html-tokenize/-/html-tokenize-2.0.1.tgz#c3b2ea6e2837d4f8c06693393e9d2a12c960be5f" + integrity sha512-QY6S+hZ0f5m1WT8WffYN+Hg+xm/w5I8XeUcAq/ZYP5wVC8xbKi4Whhru3FtrAebD5EhBW8rmFzkDI6eCAuFe2w== + dependencies: + buffer-from "~0.1.1" + inherits "~2.0.1" + minimist "~1.2.5" + readable-stream "~1.0.27-1" + through2 "~0.4.1" + +htmlparser2@6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" + +iconv-lite@0.6: + version "0.6.3" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + 
parent-module "^1.0.0" + resolve-from "^4.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@~2.0.1, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inline-style-parser@0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1" + integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +"internmap@1 - 2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" + integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.4, is-callable@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" + integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== + +is-core-module@^2.2.0, is-core-module@^2.8.1: + version "2.9.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" + integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: + version "4.0.3" + 
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +"js-tokens@^3.0.0 || ^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity 
sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + +json5@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.0.tgz#e624f259143b9062c92b6413ff92a164c80d3ccb" + integrity sha512-XzO9luP6L0xkxwhIJMTJQpZo/eeN60K08jHdexfD569AGxeNug6UketeHXEhROoM8aR7EcUoOQmIhcJQjcuq8Q== + dependencies: + array-includes "^3.1.4" + object.assign "^4.1.2" + +language-subtag-registry@~0.3.2: + version "0.3.21" + resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz#04ac218bea46f04cb039084602c6da9e788dd45a" + integrity sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg== + +language-tags@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + integrity sha1-0yHbxNowuovzAk4ED6XBRmH5GTo= + dependencies: + language-subtag-registry "~0.3.2" + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +lightweight-charts@^3.8.0: + version "3.8.0" + resolved "https://registry.yarnpkg.com/lightweight-charts/-/lightweight-charts-3.8.0.tgz#8c41ad7c1c083f18621f11ece7fc1096e131a0d3" + integrity sha512-7yFGnYuE1RjRJG9RwUTBz5wvF1QtjBOSW4FFlikr8Dh+/TDNt4ci+HsWSYmStgQUpawpvkCJ3j5/W25GppGj9Q== + dependencies: + fancy-canvas "0.2.2" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved 
"https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +minimatch@^3.0.4, minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.2.0, minimist@^1.2.6, minimist@~1.2.5: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +moment@^2.29.3: + version "2.29.3" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3" + integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multipipe@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/multipipe/-/multipipe-1.0.2.tgz#cc13efd833c9cda99f224f868461b8e1a3fd939d" + integrity sha1-zBPv2DPJzamfIk+GhGG44aP9k50= + dependencies: + duplexer2 "^0.1.2" + object-assign "^4.1.0" + +nanoid@^3.1.30: + version "3.3.4" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + +next-transpile-modules@^9.0.0: + version "9.0.0" + resolved 
"https://registry.yarnpkg.com/next-transpile-modules/-/next-transpile-modules-9.0.0.tgz#133b1742af082e61cc76b02a0f12ffd40ce2bf90" + integrity sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ== + dependencies: + enhanced-resolve "^5.7.0" + escalade "^3.1.1" + +next@12.1.6: + version "12.1.6" + resolved "https://registry.yarnpkg.com/next/-/next-12.1.6.tgz#eb205e64af1998651f96f9df44556d47d8bbc533" + integrity sha512-cebwKxL3/DhNKfg9tPZDQmbRKjueqykHHbgaoG4VBRH3AHQJ2HO0dbKFiS1hPhe1/qgc2d/hFeadsbPicmLD+A== + dependencies: + "@next/env" "12.1.6" + caniuse-lite "^1.0.30001332" + postcss "8.4.5" + styled-jsx "5.0.2" + optionalDependencies: + "@next/swc-android-arm-eabi" "12.1.6" + "@next/swc-android-arm64" "12.1.6" + "@next/swc-darwin-arm64" "12.1.6" + "@next/swc-darwin-x64" "12.1.6" + "@next/swc-linux-arm-gnueabihf" "12.1.6" + "@next/swc-linux-arm64-gnu" "12.1.6" + "@next/swc-linux-arm64-musl" "12.1.6" + "@next/swc-linux-x64-gnu" "12.1.6" + "@next/swc-linux-x64-musl" "12.1.6" + "@next/swc-win32-arm64-msvc" "12.1.6" + "@next/swc-win32-ia32-msvc" "12.1.6" + "@next/swc-win32-x64-msvc" "12.1.6" + +object-assign@^4.1.0, object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-inspect@^1.12.0, object-inspect@^1.9.0: + version "1.12.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.0.tgz#6e2c120e868fd1fd18cb4f18c31741d0d6e776f0" + integrity sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object-keys@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.4.0.tgz#28a6aae7428dd2c3a92f3d95f21335dd204e0336" + integrity sha1-KKaq50KN0sOpLz2V8hM13SBOAzY= + +object.assign@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.fromentries@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.hasown@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" + integrity 
sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== + dependencies: + define-properties "^1.1.4" + es-abstract "^1.19.5" + +object.values@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= + dependencies: + p-limit "^1.1.0" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.6, path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +postcss@8.4.5: + version "8.4.5" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.5.tgz#bae665764dfd4c6fcc24dc0fdf7e7aa00cc77f95" + integrity sha512-jBDboWM8qpaqwkMwItqTQTiFikhs/67OYVvblFFTM7MrZjt6yMKd6r2kgXizEbTTljacm4NldIlZnhbjr84QYg== + dependencies: + nanoid "^3.1.30" + picocolors "^1.0.0" + source-map-js "^1.0.1" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +prop-types@^15.8.1: + version "15.8.1" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +react-dom@18.1.0: + version "18.1.0" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.1.0.tgz#7f6dd84b706408adde05e1df575b3a024d7e8a2f" + integrity sha512-fU1Txz7Budmvamp7bshe4Zi32d0ll7ect+ccxNu9FlObT605GOEB8BfO4tmRJ39R5Zj831VCpvQ05QPBW5yb+w== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.22.0" + +react-fast-compare@^3.0.1: + version "3.2.0" + resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-3.2.0.tgz#641a9da81b6a6320f270e89724fb45a0b39e43bb" + integrity sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA== + +react-is@^16.13.1, react-is@^16.7.0: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-popper@^2.2.5: + version "2.3.0" + resolved "https://registry.yarnpkg.com/react-popper/-/react-popper-2.3.0.tgz#17891c620e1320dce318bad9fede46a5f71c70ba" + integrity sha512-e1hj8lL3uM+sgSR4Lxzn5h1GxBlpa4CQz0XLF8kx4MDrDRWY0Ena4c97PUeSX9i5W3UAfDP0z0FXCTQkoXUl3Q== + dependencies: + react-fast-compare "^3.0.1" + warning "^4.0.2" + +react-property@2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/react-property/-/react-property-2.0.0.tgz#2156ba9d85fa4741faf1918b38efc1eae3c6a136" + integrity sha512-kzmNjIgU32mO4mmH5+iUyrqlpFQhF8K2k7eZ4fdLSOPFrD1XgEuSBv9LDEgxRXTMBqMd8ppT0x6TIzqE5pdGdw== + +react-textarea-autosize@^8.3.2: + version "8.3.4" + resolved "https://registry.yarnpkg.com/react-textarea-autosize/-/react-textarea-autosize-8.3.4.tgz#270a343de7ad350534141b02c9cb78903e553524" + integrity sha512-CdtmP8Dc19xL8/R6sWvtknD/eCXkQr30dtvC4VmGInhRsfF8X/ihXCq6+9l9qbxmKRiq407/7z5fxE7cVWQNgQ== + dependencies: + "@babel/runtime" "^7.10.2" + use-composed-ref "^1.3.0" + use-latest "^1.2.1" + +react@18.1.0: + version "18.1.0" + resolved "https://registry.yarnpkg.com/react/-/react-18.1.0.tgz#6f8620382decb17fdc5cc223a115e2adbf104890" + integrity sha512-4oL8ivCz5ZEPyclFQXaNksK3adutVS8l2xzZU0cqEFrE9Sb7fC0EFK5uEk74wIreL1DERyjvsU915j1pcT2uEQ== + dependencies: + loose-envify "^1.1.0" + +readable-stream@^2.0.2: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@~1.0.17, readable-stream@~1.0.27-1: + version "1.0.34" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" + integrity sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw= + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +regenerator-runtime@^0.13.4: + version "0.13.9" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +regexp.prototype.flags@^1.4.1: + version "1.4.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve@^1.20.0, resolve@^1.22.0: + version "1.22.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198" + integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== + dependencies: + is-core-module "^2.8.1" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.3: + version "2.0.0-next.3" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46" + integrity 
sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q== + dependencies: + is-core-module "^2.2.0" + path-parse "^1.0.6" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +rw@1: + version "1.3.3" + resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" + integrity sha1-P4Yt+pGrdmsUiF700BEkv9oHT7Q= + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +"safer-buffer@>= 2.1.2 < 3.0.0": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +scheduler@^0.22.0: + version "0.22.0" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.22.0.tgz#83a5d63594edf074add9a7198b1bae76c3db01b8" + integrity sha512-6QAm1BgQI88NPYymgGQLCZgvep4FyePDWFpXVK+zNSUgHwlqpJy8VEh8Et0KxTACS4VWwMousBElAZOH9nkkoQ== + dependencies: + loose-envify "^1.1.0" + +semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.3.5: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect 
"^1.9.0" + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +source-map-js@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +string.prototype.matchall@^4.0.6: + version "4.0.7" + resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" + integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.4.1" + side-channel "^1.0.4" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string_decoder@~0.10.x: + version "0.10.31" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +style-to-js@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/style-to-js/-/style-to-js-1.1.0.tgz#631cbb20fce204019b3aa1fcb5b69d951ceac4ac" + integrity sha512-1OqefPDxGrlMwcbfpsTVRyzwdhr4W0uxYQzeA2F1CBc8WG04udg2+ybRnvh3XYL4TdHQrCahLtax2jc8xaE6rA== + dependencies: + style-to-object "0.3.0" + 
+style-to-object@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-0.3.0.tgz#b1b790d205991cc783801967214979ee19a76e46" + integrity sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA== + dependencies: + inline-style-parser "0.1.1" + +styled-jsx@5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.0.2.tgz#ff230fd593b737e9e68b630a694d460425478729" + integrity sha512-LqPQrbBh3egD57NBcHET4qcgshPks+yblyhPlH2GY8oaDgKs8SK4C3dBh3oSJjgzJ3G5t1SYEZGHkP+QEpX9EQ== + +stylis@4.0.13: + version "4.0.13" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.0.13.tgz#f5db332e376d13cc84ecfe5dace9a2a51d954c91" + integrity sha512-xGPXiFVl4YED9Jh7Euv2V220mriG9u4B2TA6Ybjc1catrstKD2PpIdU3U0RKpkVBC2EhmL/F0sPCr9vrFTNRag== + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +tabler-icons-react@^1.48.0: + version "1.48.0" + resolved "https://registry.yarnpkg.com/tabler-icons-react/-/tabler-icons-react-1.48.0.tgz#2b3251d4b9effa1e78baf4cb05fe7cf79449f116" + integrity sha512-dlcAIGYIB7+fsU1tj8HuK5aN57g3Q5KD8GMAxpBR9E62yFhjn8fbwD2M2X18E66v5TYakUhu6tfUxM+/jvI6Kg== + +tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + +through2@~0.4.1: + version "0.4.2" + resolved "https://registry.yarnpkg.com/through2/-/through2-0.4.2.tgz#dbf5866031151ec8352bb6c4db64a2292a840b9b" + integrity sha1-2/WGYDEVHsg1K7bE22SiKSqEC5s= + dependencies: + readable-stream "~1.0.17" + xtend "~2.1.1" + +through@^2.3.8: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tsconfig-paths@^3.14.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^1.8.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity 
sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tsutils@^3.21.0: + version "3.21.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +typescript@4.6.4: + version "4.6.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.6.4.tgz#caa78bbc3a59e6a5c510d35703f6a09877ce45e9" + integrity sha512-9ia/jWHIEbo49HfjrLGfKbZSuWo9iTMwXO+Ca3pRsSpbsMbc7/IU8NKdCZVRRBafVPGnoJeFL76ZOAA84I9fEg== + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +use-composed-ref@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/use-composed-ref/-/use-composed-ref-1.3.0.tgz#3d8104db34b7b264030a9d916c5e94fbe280dbda" + integrity sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ== + +use-isomorphic-layout-effect@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz#497cefb13d863d687b08477d9e5a164ad8c1a6fb" + integrity sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA== + +use-latest@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/use-latest/-/use-latest-1.2.1.tgz#d13dfb4b08c28e3e33991546a2cee53e14038cf2" + integrity sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw== + dependencies: + use-isomorphic-layout-effect "^1.1.1" + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +warning@^4.0.2: + version "4.0.3" + resolved "https://registry.yarnpkg.com/warning/-/warning-4.0.3.tgz#16e9e077eb8a86d6af7d64aa1e05fd85b4678ca3" + integrity 
sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w== + dependencies: + loose-envify "^1.0.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +xtend@~2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-2.1.2.tgz#6efecc2a4dad8e6962c4901b337ce7ba87b5d28b" + integrity sha1-bv7MKk2tjmlixJAbM3znuoe10os= + dependencies: + object-keys "~0.4.0" + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== diff --git a/assets/bbg-256.png b/assets/bbg-256.png new file mode 100644 index 0000000000..a1dfbef63e Binary files /dev/null and b/assets/bbg-256.png differ diff --git a/assets/bbg-32.png b/assets/bbg-32.png new file mode 100644 index 0000000000..607bf15076 Binary files /dev/null and b/assets/bbg-32.png differ diff --git a/assets/bbg-512.png b/assets/bbg-512.png new file mode 100644 index 0000000000..09383e27e0 Binary files /dev/null and b/assets/bbg-512.png differ diff --git a/assets/overview.svg b/assets/overview.svg new file mode 100644 index 0000000000..371d1b0985 --- /dev/null +++ b/assets/overview.svg @@ -0,0 +1,3 @@ + + +
[assets/overview.svg: overview diagram; recovered text labels: bbgo.yaml (file), .env.local (file), API - KEY, bbgo/cmd/root.go, bbgo/cmd/run.go, pkg/bbgo/environment.go (Session, DBDriver, Mysql), pkg/bbgo/trader.go (Init, userConfig.ExchangeStrategies), strategies Grid / Boll Grid / ... (pkg/strategy/grid/strategy.go, pkg/strategy/bollgrid/strategy.go, pkg/strategy/.../strategy.go), exchange sessions Binance / FTX (Exchange, API - KEY), grid levels buy low (低買) at 1800 / 1900 / 2000 usd and sell high (高賣) at 2100 / 2200 usd, Far Eastern International Bank (遠東商銀), other banks (其他銀行)]
\ No newline at end of file diff --git a/assets/screenshots/backtest-report.jpg b/assets/screenshots/backtest-report.jpg new file mode 100644 index 0000000000..adeeca0720 Binary files /dev/null and b/assets/screenshots/backtest-report.jpg differ diff --git a/assets/screenshots/dashboard.jpeg b/assets/screenshots/dashboard.jpeg new file mode 100644 index 0000000000..96efd3e8b3 Binary files /dev/null and b/assets/screenshots/dashboard.jpeg differ diff --git a/assets/screenshots/setup-wizard-grid.jpeg b/assets/screenshots/setup-wizard-grid.jpeg new file mode 100644 index 0000000000..77b02cf4c1 Binary files /dev/null and b/assets/screenshots/setup-wizard-grid.jpeg differ diff --git a/assets/screenshots/setup-wizard.jpeg b/assets/screenshots/setup-wizard.jpeg new file mode 100644 index 0000000000..4a3b65008f Binary files /dev/null and b/assets/screenshots/setup-wizard.jpeg differ diff --git a/charts/bbgo/Chart.yaml b/charts/bbgo/Chart.yaml index ff4d21a59c..85c81631fb 100644 --- a/charts/bbgo/Chart.yaml +++ b/charts/bbgo/Chart.yaml @@ -1,6 +1,6 @@ apiVersion: v2 name: bbgo -description: a helm chart for bbgo trading bot +description: Helm chart for bbgo trading bot # A chart can be either an 'application' or a 'library' chart. # @@ -15,9 +15,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.1.0 +version: 0.3.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. -appVersion: 1.16.0 +appVersion: 1.21.0 diff --git a/charts/bbgo/config b/charts/bbgo/config deleted file mode 120000 index 899f698982..0000000000 --- a/charts/bbgo/config +++ /dev/null @@ -1 +0,0 @@ -../../config \ No newline at end of file diff --git a/charts/bbgo/templates/configmap.yaml b/charts/bbgo/templates/configmap.yaml index bef2bb5289..91ae2d8723 100644 --- a/charts/bbgo/templates/configmap.yaml +++ b/charts/bbgo/templates/configmap.yaml @@ -1,3 +1,4 @@ +{{- if .Values.configmap }} --- apiVersion: v1 kind: ConfigMap @@ -6,5 +7,12 @@ metadata: labels: {{- include "bbgo.labels" . | nindent 4 }} data: + # if configmap is given as a string, we read it as a file + {{- if eq "string" (typeOf .Values.configmap) }} bbgo.yaml: |- - {{- .Files.Get .Values.configmap.file | nindent 4 }} + {{- .Files.Get .Values.configmap | nindent 4 }} + {{- else }} + bbgo.yaml: + {{- .Values.configmap | nindent 4 }} + {{- end }} +{{- end }} diff --git a/charts/bbgo/templates/deployment.yaml b/charts/bbgo/templates/deployment.yaml index d3ee68c9d3..c62d2e4160 100644 --- a/charts/bbgo/templates/deployment.yaml +++ b/charts/bbgo/templates/deployment.yaml @@ -8,6 +8,9 @@ spec: {{- if not .Values.autoscaling.enabled }} replicas: {{ .Values.replicaCount }} {{- end }} + strategy: + # we need to cleanly cancel all the orders, so we use Recreate strategy here + type: Recreate selector: matchLabels: {{- include "bbgo.selectorLabels" . 
| nindent 6 }} @@ -33,14 +36,48 @@ spec: {{- toYaml .Values.securityContext | nindent 12 }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy }} - # ports: - # - name: http - # containerPort: 80 - # protocol: TCP - # livenessProbe: - # httpGet: - # path: / - # port: http + args: + {{- if .Values.metrics.enabled }} + - "--metrics" + - "--metrics-port" + - {{ .Values.metrics.port | quote }} + {{- end }} + - "run" + - "--config" + - "/config/bbgo.yaml" + - "--no-compile" + {{- if .Values.webserver.enabled }} + - "--enable-webserver" + {{- end }} + {{- if .Values.grpc.enabled }} + - "--enable-grpc" + - "--grpc-bind" + - {{ printf ":%d" (.Values.grpc.port | int) | default ":50051" | quote }} + {{- end }} + {{- if .Values.debug.enabled }} + - "--debug" + {{- end }} + + ports: + {{- if .Values.webserver.enabled }} + - name: http + containerPort: 8080 + protocol: TCP + {{- end }} + {{- if .Values.grpc.enabled }} + - name: grpc + containerPort: {{ .Values.grpc.port | default 50051 }} + protocol: TCP + {{- end }} + {{- if .Values.metrics.enabled }} + - name: metrics + containerPort: 9090 + protocol: TCP + livenessProbe: + httpGet: + path: /metrics + port: metrics + {{- end }} # readinessProbe: # httpGet: # path: / @@ -59,7 +96,11 @@ spec: volumes: - name: config-volume configMap: + {{- if .Values.existingConfigMap }} + name: {{ .Values.existingConfigMap }} + {{- else }} name: {{ include "bbgo.fullname" . }} + {{- end }} {{- with .Values.nodeSelector }} nodeSelector: diff --git a/charts/bbgo/templates/podmonitor.yaml b/charts/bbgo/templates/podmonitor.yaml new file mode 100644 index 0000000000..ce96fb638e --- /dev/null +++ b/charts/bbgo/templates/podmonitor.yaml @@ -0,0 +1,15 @@ +{{- if .Values.metrics.enabled }} +--- +apiVersion: monitoring.coreos.com/v1 +kind: PodMonitor +metadata: + name: {{ include "bbgo.fullname" . }} + labels: + {{- include "bbgo.labels" . | nindent 4 }} +spec: + selector: + matchLabels: + {{- include "bbgo.selectorLabels" . | nindent 6 }} + podMetricsEndpoints: + - port: metrics +{{- end }} \ No newline at end of file diff --git a/charts/bbgo/templates/service.yaml b/charts/bbgo/templates/service.yaml index 154fc73c3d..ce4243a7d7 100644 --- a/charts/bbgo/templates/service.yaml +++ b/charts/bbgo/templates/service.yaml @@ -11,5 +11,11 @@ spec: targetPort: http protocol: TCP name: http + {{- if .Values.grpc.enabled }} + - port: {{ .Values.grpc.port | default 50051 }} + targetPort: grpc + protocol: TCP + name: grpc + {{- end }} selector: {{- include "bbgo.selectorLabels" . | nindent 4 }} diff --git a/charts/bbgo/values.yaml b/charts/bbgo/values.yaml index 5be9acba71..179c317773 100644 --- a/charts/bbgo/values.yaml +++ b/charts/bbgo/values.yaml @@ -15,9 +15,16 @@ imagePullSecrets: [] nameOverride: "" fullnameOverride: "" +# existingConfigMap is used for loading the existing configmap +# optional, if you have a configmap rather than the default configmap name +existingConfigMap: + +# configmap could be a string pointing to the local file in the chart +# or a object with inlined bbgo yaml config configmap: - file: "config/bbgo.yaml" +# dotenv defines environment variables, which are loaded from a k8s secret. +# if not defined, then release name will be used to load the secret. 
dotenv: secret: null @@ -45,7 +52,7 @@ securityContext: {} service: type: ClusterIP - port: 80 + port: 8080 ingress: enabled: false @@ -60,6 +67,20 @@ ingress: # hosts: # - chart-example.local +webserver: + enabled: false + +metrics: + enabled: false + port: 9090 + +grpc: + enabled: false + port: 50051 + +debug: + enabled: false + resources: # We usually recommend not to specify default resources and to leave this as a conscious # choice for the user. This also increases chances charts run on environments with little diff --git a/cmd/bbgo-lorca/main.go b/cmd/bbgo-lorca/main.go new file mode 100644 index 0000000000..3610661643 --- /dev/null +++ b/cmd/bbgo-lorca/main.go @@ -0,0 +1,162 @@ +package main + +import ( + "context" + "net" + "os" + "os/signal" + "path/filepath" + "runtime" + time2 "time" + + "github.com/joho/godotenv" + "github.com/zserge/lorca" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/cmd" + "github.com/c9s/bbgo/pkg/server" +) + +func main() { + ep, err := os.Executable() + if err != nil { + log.Fatalln("failed to find the current executable:", err) + } + + err = os.Chdir(filepath.Join(filepath.Dir(ep), "..", "Resources")) + if err != nil { + log.Fatalln("chdir error:", err) + } + + dotenvFile := ".env.local" + if _, err := os.Stat(dotenvFile); err == nil { + if err := godotenv.Load(dotenvFile); err != nil { + log.WithError(err).Error("error loading dotenv file") + return + } + } + + var args []string + if runtime.GOOS == "linux" { + args = append(args, "--class=bbgo") + } + + // args = append(args, "--enable-logging", "--v=99") + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // here allocate a chrome window with a blank page. + ui, err := lorca.New("", "", 1024, 780, args...) 
+ if err != nil { + log.WithError(err).Error("failed to initialize the window") + return + } + + defer ui.Close() + + configFile := "bbgo.yaml" + var setup *server.Setup + var userConfig *bbgo.Config + + _, err = os.Stat(configFile) + if os.IsNotExist(err) { + setup = &server.Setup{ + Context: ctx, + Cancel: cancel, + Token: "", + BeforeRestart: func() { + if err := ui.Close(); err != nil { + log.WithError(err).Errorf("ui close error") + } + }, + } + userConfig = &bbgo.Config{ + Notifications: nil, + Persistence: nil, + Sessions: nil, + ExchangeStrategies: nil, + } + } else { + userConfig, err = bbgo.Load(configFile, true) + if err != nil { + log.WithError(err).Error("can not load config file") + return + } + } + + environ := bbgo.NewEnvironment() + trader := bbgo.NewTrader(environ) + + // we could initialize the environment from the settings + if setup == nil { + if err := cmd.BootstrapEnvironment(ctx, environ, userConfig); err != nil { + log.WithError(err).Error("failed to bootstrap environment") + return + } + + if err := trader.Configure(userConfig); err != nil { + log.WithError(err).Error("failed to configure trader") + return + } + + if err := trader.LoadState(); err != nil { + log.WithError(err).Error("failed to load strategy states") + return + } + + // for setup mode, we don't start the trader + go func() { + if err := trader.Run(ctx); err != nil { + log.WithError(err).Error("failed to start trader") + } + }() + } + + // find a free port for binding the server + ln, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + log.WithError(err).Error("can not bind listener") + return + } + + defer ln.Close() + + baseURL := "http://" + ln.Addr().String() + + srv := &server.Server{ + Config: userConfig, + Environ: environ, + Trader: trader, + OpenInBrowser: false, + Setup: setup, + } + + go func() { + if err := srv.RunWithListener(ctx, ln); err != nil { + log.WithError(err).Errorf("server error") + } + }() + + log.Infof("pinging the server at %s", baseURL) + server.PingUntil(ctx, time2.Second, baseURL, func() { + log.Infof("got pong, loading base url %s to ui...", baseURL) + + if err := ui.Load(baseURL); err != nil { + log.WithError(err).Error("failed to load page") + } + }) + + // Wait until the interrupt signal arrives or browser window is closed + sigc := make(chan os.Signal) + signal.Notify(sigc, os.Interrupt) + + select { + case <-sigc: + case <-ui.Done(): + } + + log.Println("exiting...") +} diff --git a/cmd/bbgo-webview/main.go b/cmd/bbgo-webview/main.go new file mode 100644 index 0000000000..3aff9361e4 --- /dev/null +++ b/cmd/bbgo-webview/main.go @@ -0,0 +1,166 @@ +package main + +import ( + "context" + "flag" + "net" + "os" + "os/signal" + "path/filepath" + "strconv" + "time" + + "github.com/joho/godotenv" + "github.com/webview/webview" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/cmd" + "github.com/c9s/bbgo/pkg/server" +) + +func main() { + noChangeDir := false + portNum := 0 + flag.BoolVar(&noChangeDir, "no-chdir", false, "do not change directory") + flag.IntVar(&portNum, "port", 0, "server port") + flag.Parse() + + if !noChangeDir { + ep, err := os.Executable() + if err != nil { + log.Fatalln("failed to find the current executable:", err) + } + + resourceDir := filepath.Join(filepath.Dir(ep), "..", "Resources") + if _, err := os.Stat(resourceDir); err == nil { + err = os.Chdir(resourceDir) + if err != nil { + log.Fatalln("chdir error:", err) + } + } + } + + dotenvFile := ".env.local" + if _, err := os.Stat(dotenvFile); 
err == nil { + if err := godotenv.Load(dotenvFile); err != nil { + log.WithError(err).Error("error loading dotenv file") + return + } + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + debug, _ := strconv.ParseBool(os.Getenv("DEBUG_WEBVIEW")) + view := webview.New(debug) + defer view.Destroy() + + view.SetTitle("BBGO") + view.SetSize(1024, 780, webview.HintNone) + + configFile := "bbgo.yaml" + var setup *server.Setup + var userConfig *bbgo.Config + + _, err := os.Stat(configFile) + if os.IsNotExist(err) { + setup = &server.Setup{ + Context: ctx, + Cancel: cancel, + Token: "", + BeforeRestart: func() { + view.Destroy() + }, + } + userConfig = &bbgo.Config{ + Notifications: nil, + Persistence: nil, + Sessions: nil, + ExchangeStrategies: nil, + } + } else { + userConfig, err = bbgo.Load(configFile, true) + if err != nil { + log.WithError(err).Error("can not load config file") + return + } + } + + environ := bbgo.NewEnvironment() + trader := bbgo.NewTrader(environ) + + // we could initialize the environment from the settings + if setup == nil { + if err := cmd.BootstrapEnvironment(ctx, environ, userConfig); err != nil { + log.WithError(err).Error("failed to bootstrap environment") + return + } + + // we could initialize the environment from the settings + go func() { + if err := environ.Sync(ctx); err != nil { + log.WithError(err).Error("failed to sync data") + return + } + + if err := trader.Configure(userConfig); err != nil { + log.WithError(err).Error("failed to configure trader") + return + } + + if err := trader.LoadState(); err != nil { + log.WithError(err).Error("failed to load strategy states") + return + } + + // for setup mode, we don't start the trader + if err := trader.Run(ctx); err != nil { + log.WithError(err).Error("failed to start trader") + } + }() + } + + // find a free port for binding the server + ln, err := net.Listen("tcp", "127.0.0.1:"+strconv.Itoa(portNum)) + if err != nil { + log.WithError(err).Error("can not bind listener") + return + } + + defer ln.Close() + + baseURL := "http://" + ln.Addr().String() + + srv := &server.Server{ + Config: userConfig, + Environ: environ, + Trader: trader, + OpenInBrowser: false, + Setup: setup, + } + + go func() { + if err := srv.RunWithListener(ctx, ln); err != nil { + log.WithError(err).Errorf("server error") + } + }() + + log.Infof("pinging the server at %s", baseURL) + server.PingUntil(ctx, time.Second, baseURL, func() { + log.Infof("got pong, navigate to %s", baseURL) + view.Navigate(baseURL) + view.Run() + }) + + // Wait until the interrupt signal arrives or browser window is closed + sigc := make(chan os.Signal) + signal.Notify(sigc, os.Interrupt) + + select { + case <-sigc: + } + + log.Println("exiting...") +} diff --git a/cmd/update-doc/main.go b/cmd/update-doc/main.go new file mode 100644 index 0000000000..214ff608b0 --- /dev/null +++ b/cmd/update-doc/main.go @@ -0,0 +1,19 @@ +package main + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/cmd" + "github.com/spf13/cobra/doc" + "log" + "path" + "runtime" +) + +func main() { + _, b, _, _ := runtime.Caller(0) + root := path.Join(path.Dir(path.Dir(path.Dir(b))), "doc", "commands") + fmt.Println(root) + if err := doc.GenMarkdownTree(cmd.RootCmd, root); err != nil { + log.Fatal(err) + } +} diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000000..c4a76f25a1 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,23 @@ +codecov: + require_ci_to_pass: true + +comment: + behavior: default + layout: "reach,diff,flags,files,footer" + 
require_changes: no + +parsers: + gcov: + branch_detection: + conditional: yes + loop: yes + method: no + macro: no + +coverage: + precision: 2 + round: down + range: "70...100" + +github_checks: + annotations: false diff --git a/config/autoborrow.yaml b/config/autoborrow.yaml new file mode 100644 index 0000000000..6b94802f2c --- /dev/null +++ b/config/autoborrow.yaml @@ -0,0 +1,24 @@ +--- +exchangeStrategies: +- on: binance + autoborrow: + interval: 30m + autoRepayWhenDeposit: true + + # minMarginRatio for triggering auto borrow + # we trigger auto borrow only when the margin ratio is above the number + minMarginLevel: 1.5 + + # maxMarginRatio for stop auto-repay + # if the margin ratio is high enough, we don't have the urge to repay + maxMarginLevel: 10.0 + + assets: + - asset: ETH + low: 3.0 + maxQuantityPerBorrow: 1.0 + maxTotalBorrow: 10.0 + - asset: USDT + low: 1000.0 + maxQuantityPerBorrow: 100.0 + maxTotalBorrow: 10.0 diff --git a/config/backtest.yaml b/config/backtest.yaml new file mode 100644 index 0000000000..d41388b290 --- /dev/null +++ b/config/backtest.yaml @@ -0,0 +1,23 @@ +--- +backtest: + startTime: "2022-01-01" + endTime: "2022-01-02" + symbols: + - BTCUSDT + sessions: + - binance + - ftx + - max + - kucoin + - okex + +exchangeStrategies: +- on: binance + grid: + symbol: BTCUSDT + quantity: 0.001 + gridNumber: 100 + profitSpread: 1000.0 # The profit price spread that you want to add to your sell order when your buy order is executed + upperPrice: 40_000.0 + lowerPrice: 20_000.0 + diff --git a/config/binance-margin.yaml b/config/binance-margin.yaml new file mode 100644 index 0000000000..61cb4aea59 --- /dev/null +++ b/config/binance-margin.yaml @@ -0,0 +1,27 @@ +--- +sessions: + # cross margin + binance_margin: + exchange: binance + margin: true + + # isolated margin + binance_margin_linkusdt: + exchange: binance + margin: true + isolatedMargin: true + isolatedMarginSymbol: LINKUSDT + + binance_margin_dotusdt: + exchange: binance + margin: true + isolatedMargin: true + isolatedMarginSymbol: DOTUSDT + +exchangeStrategies: + +- on: binance_margin_linkusdt + dummy: + symbol: LINKUSDT + interval: 1m + diff --git a/config/bollgrid.yaml b/config/bollgrid.yaml index 913e4e9cc1..bef3371124 100644 --- a/config/bollgrid.yaml +++ b/config/bollgrid.yaml @@ -24,7 +24,7 @@ sessions: max: exchange: max - envVarPrefix: max + envVarPrefix: MAX riskControls: # This is the session-based risk controller, which let you configure different risk controller by session. 
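The envVarPrefix change just above matters because the prefix is used to compose the environment variable names the session reads its exchange credentials from; with MAX, the session is expected to look for variables like the following in .env.local (hypothetical values shown):

  MAX_API_KEY=your_max_api_key
  MAX_API_SECRET=your_max_api_secret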
@@ -52,17 +52,18 @@ backtest: symbols: - BTCUSDT account: - makerCommission: 15 - takerCommission: 15 - balances: - BTC: 0.0 - USDT: 10000.0 + max: + makerFeeRate: 15 + takerFeeRate: 15 + balances: + BTC: 0.0 + USDT: 10000.0 exchangeStrategies: - on: max bollgrid: symbol: BTCUSDT - interval: 1h - gridNumber: 100 - quantity: 0.002 - profitSpread: 10.0 + interval: 5m + gridNumber: 2 + quantity: 0.001 + profitSpread: 100.0 diff --git a/config/bollmaker.yaml b/config/bollmaker.yaml new file mode 100644 index 0000000000..f4b253d3f6 --- /dev/null +++ b/config/bollmaker.yaml @@ -0,0 +1,160 @@ +--- +persistence: + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +sessions: + binance: + exchange: binance + envVarPrefix: BINANCE + +# example command: +# godotenv -f .env.local -- go run ./cmd/bbgo backtest --sync-from 2020-11-01 --config config/grid.yaml --base-asset-baseline +backtest: + # for testing max draw down (MDD) at 03-12 + # see here for more details + # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp + startTime: "2022-01-01" + endTime: "2022-05-12" + sessions: + - binance + symbols: + - ETHUSDT + accounts: + binance: + balances: + ETH: 0.0 + USDT: 10_000.0 + +exchangeStrategies: + +- on: binance + bollmaker: + symbol: ETHUSDT + + # interval is how long do you want to update your order price and quantity + interval: 1m + + # quantity is the base order quantity for your buy/sell order. + quantity: 0.05 + + # useTickerPrice use the ticker api to get the mid price instead of the closed kline price. + # The back-test engine is kline-based, so the ticker price api is not supported. + # Turn this on if you want to do real trading. + useTickerPrice: true + + # spread is the price spread from the middle price. + # For ask orders, the ask price is ((bestAsk + bestBid) / 2 * (1.0 + spread)) + # For bid orders, the bid price is ((bestAsk + bestBid) / 2 * (1.0 - spread)) + # Spread can be set by percentage or floating number. e.g., 0.1% or 0.001 + spread: 0.1% + + # minProfitSpread is the minimal order price spread from the current average cost. + # For long position, you will only place sell order above the price (= average cost * (1 + minProfitSpread)) + # For short position, you will only place buy order below the price (= average cost * (1 - minProfitSpread)) + minProfitSpread: 0.1% + + # EXPERIMENTAL + # Dynamic spread is an experimental feature. Use at your own risk! + # + # dynamicSpread enables the automatic adjustment to bid and ask spread. 
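+ # A quick worked example of the fixed spread above (illustrative numbers, not
+ # taken from this config): with bestBid = 1990 and bestAsk = 2010, the mid price
+ # is (1990 + 2010) / 2 = 2000, so spread: 0.1% quotes the ask at
+ # 2000 * 1.001 = 2002 and the bid at 2000 * 0.999 = 1998. The commented
+ # dynamicSpread block below shows how that fixed spread can instead be adjusted
+ # automatically from the SMA of recent spreads through askSpreadScale /
+ # bidSpreadScale.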
+ # dynamicSpread: + # enabled: true + # # window is the window of the SMAs of spreads + # window: 1 + # askSpreadScale: + # byPercentage: + # # exp means we want to use exponential scale, you can replace "exp" with "linear" for linear scale + # exp: + # # from down to up + # domain: [ 0.0001, 0.005 ] + # # when in down band, holds 1.0 by maximum + # # when in up band, holds 0.05 by maximum + # range: [ 0.001, 0.002 ] + # bidSpreadScale: + # byPercentage: + # # exp means we want to use exponential scale, you can replace "exp" with "linear" for linear scale + # exp: + # # from down to up + # domain: [ 0.0001, 0.005 ] + # # when in down band, holds 1.0 by maximum + # # when in up band, holds 0.05 by maximum + # range: [ 0.001, 0.002 ] + + # maxExposurePosition is the maximum position you can hold + # +10 means you can hold 10 ETH long position by maximum + # -10 means you can hold -10 ETH short position by maximum + # maxExposurePosition: 3.0 + maxExposurePosition: 10 + + # dynamicExposurePositionScale overrides maxExposurePosition + # for domain, + # -1 means -100%, the price is on the lower band price. + # if the price breaks the lower band, a number less than -1 will be given. + # 1 means 100%, the price is on the upper band price. + # if the price breaks the upper band, a number greater than 1 will be given, for example, 1.2 for 120%, and 1.3 for 130%. + dynamicExposurePositionScale: + byPercentage: + # exp means we want to use exponential scale, you can replace "exp" with "linear" for linear scale + exp: + # from lower band -100% (-1) to upper band 100% (+1) + domain: [ -1, 1 ] + # when in down band, holds 1.0 by maximum + # when in up band, holds 0.05 by maximum + range: [ 10.0, 1.0 ] + + # DisableShort means you can don't want short position during the market making + # THe short here means you might sell some of your existing inventory. + disableShort: true + + # uptrendSkew, like the strongUptrendSkew, but the price is still in the default band. + uptrendSkew: 0.8 + + # downtrendSkew, like the strongDowntrendSkew, but the price is still in the default band. + downtrendSkew: 1.2 + + defaultBollinger: + interval: "1h" + window: 21 + bandWidth: 2.0 + + # neutralBollinger is the smaller range of the bollinger band + # If price is in this band, it usually means the price is oscillating. + neutralBollinger: + interval: "5m" + window: 21 + bandWidth: 2.0 + + # tradeInBand: when tradeInBand is set, you will only place orders in the bollinger band. + tradeInBand: false + + # buyBelowNeutralSMA: when this set, it will only place buy order when the current price is below the SMA line. + buyBelowNeutralSMA: false + + # Set up your stop order, this is optional + # sometimes the stop order might decrease your total profit. + # you can setup multiple stop, + stops: + # use trailing stop order + - trailingStop: + # callbackRate: when the price reaches -1% from the previous highest, we trigger the stop + callbackRate: 5.1% + + # closePosition is how much position do you want to close + closePosition: 20% + + # minProfit is how much profit you want to take. + # if you set this option, your stop will only be triggered above the average cost. + minProfit: 5% + + # interval is the time interval for checking your stop + interval: 1m + + # virtual means we don't place a a REAL stop order + # when virtual is on + # the strategy won't place a REAL stop order, instead if watches the close price, + # and if the condition matches, it submits a market order to close your position. 
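+ # Illustrative numbers for the trailing stop above (not from this config): if
+ # the highest price reached after entry is 2000, callbackRate: 5.1% triggers the
+ # stop once the close falls below 2000 * (1 - 0.051) = 1898; closePosition: 20%
+ # then closes 20% of the position (e.g. 2 ETH of a 10 ETH position), and since
+ # the stop runs in virtual mode (virtual: true below), that close is submitted
+ # as a market order instead of a resting stop order.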
+ virtual: true + diff --git a/config/bollmaker_optimizer.yaml b/config/bollmaker_optimizer.yaml new file mode 100644 index 0000000000..e7f19b21b6 --- /dev/null +++ b/config/bollmaker_optimizer.yaml @@ -0,0 +1,29 @@ +# usage: +# +# go run ./cmd/bbgo optimize --config bollmaker_ethusdt.yaml --optimizer-config optimizer.yaml --debug +# +--- +executor: + type: local + local: + maxNumberOfProcesses: 10 + +matrix: +- type: iterate + label: interval + path: '/exchangeStrategies/0/bollmaker/interval' + values: [ "1m", "5m", "15m", "30m" ] + +- type: range + path: '/exchangeStrategies/0/bollmaker/amount' + label: amount + min: 20.0 + max: 100.0 + step: 20.0 + +- type: range + label: spread + path: '/exchangeStrategies/0/bollmaker/spread' + min: 0.1% + max: 0.2% + step: 0.01% diff --git a/config/dca.yaml b/config/dca.yaml new file mode 100644 index 0000000000..b8d08e47b5 --- /dev/null +++ b/config/dca.yaml @@ -0,0 +1,23 @@ +--- +backtest: + startTime: "2022-04-01" + endTime: "2022-05-01" + sessions: + - binance + symbols: + - BTCUSDT + accounts: + binance: + balances: + USDT: 20_000.0 + +exchangeStrategies: + +- on: binance + dca: + symbol: BTCUSDT + budgetPeriod: day + investmentInterval: 4h + budget: 1000 + + diff --git a/config/trailingstop.yaml b/config/emastop.yaml similarity index 98% rename from config/trailingstop.yaml rename to config/emastop.yaml index 2f57064693..bfc6783ac2 100644 --- a/config/trailingstop.yaml +++ b/config/emastop.yaml @@ -1,6 +1,6 @@ --- crossExchangeStrategies: -- trailingstop: +- emastop: sourceExchange: "binance" targetExchange: "max" symbol: ETHUSDT diff --git a/config/etf.yaml b/config/etf.yaml new file mode 100644 index 0000000000..3897a58aeb --- /dev/null +++ b/config/etf.yaml @@ -0,0 +1,11 @@ +exchangeStrategies: +- on: max + etf: + duration: 24h + totalAmount: 200.0 + index: + BTCUSDT: 5% + LTCUSDT: 15% + ETHUSDT: 30% + LINKUSDT: 20% + DOTUSDT: 30% diff --git a/config/ewo_dgtrd.yaml b/config/ewo_dgtrd.yaml new file mode 100644 index 0000000000..605c45e14c --- /dev/null +++ b/config/ewo_dgtrd.yaml @@ -0,0 +1,65 @@ +--- +sessions: + binance: + exchange: binance + futures: true + envVarPrefix: binance + heikinAshi: false + +exchangeStrategies: + +- on: binance + ewo_dgtrd: + symbol: MATICUSDT + # kline interval for indicators + interval: 15m + # use ema as MA + useEma: false + # use sma as MA, used when ema is false + # if both sma and ema are false, use EVMA + useSma: false + # ewo signal line window size + sigWin: 5 + # SL percentage from entry price + stoploss: 2% + # use HeikinAshi klines instead of normal OHLC + useHeikinAshi: true + # disable SL when short + disableShortStop: false + # disable SL when long + disableLongStop: false + # CCI Stochastic Indicator high filter + cciStochFilterHigh: 80 + # CCI Stochastic Indicator low filter + cciStochFilterLow: 20 + # ewo change rate histogram's upperbound filter + # set to 1 would intend to let all ewo pass + ewoChangeFilterHigh: 1. 
+ # ewo change rate histogram's lowerbound filter + # set to 0 would intend to let all ewo pass + ewoChangeFilterLow: 0.0 + # print record exit point in log messages + record: false + +sync: + userDataStream: + trades: true + filledOrders: true + sessions: + - binance + symbols: + - MATICUSDT + +backtest: + startTime: "2022-05-01" + endTime: "2022-05-27" + symbols: + - MATICUSDT + sessions: [binance] + accounts: + binance: + #makerFeeRate: 0 + #takerFeeRate: 15 + balances: + MATIC: 000.0 + USDT: 15000.0 diff --git a/config/factorzoo.yaml b/config/factorzoo.yaml new file mode 100644 index 0000000000..df83d50ee0 --- /dev/null +++ b/config/factorzoo.yaml @@ -0,0 +1,30 @@ +sessions: + binance: + exchange: binance + envVarPrefix: binance +# futures: true + + +exchangeStrategies: +- on: binance + factorzoo: + symbol: BTCUSDT + interval: 12h # T:20/12h + quantity: 0.95 + + +backtest: + sessions: + - binance + # for testing max draw down (MDD) at 03-12 + # see here for more details + # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp + startTime: "2022-03-15" + endTime: "2022-04-13" + symbols: + - BTCUSDT + accounts: + binance: + balances: + BTC: 1.0 + USDT: 45_000.0 diff --git a/config/fmaker.yaml b/config/fmaker.yaml new file mode 100644 index 0000000000..e1993fab36 --- /dev/null +++ b/config/fmaker.yaml @@ -0,0 +1,26 @@ +sessions: + binance: + exchange: binance + envVarPrefix: binance + + +exchangeStrategies: +- on: binance + fmaker: + symbol: BTCUSDT + interval: 1m + spread: 0.15% + amount: 300 # 11 + +backtest: + sessions: + - binance + startTime: "2022-01-01" + endTime: "2022-05-31" + symbols: + - BTCUSDT + account: + binance: + balances: + BTC: 1 # 1 + USDT: 45_000 # 30_000 diff --git a/config/funding.yaml b/config/funding.yaml new file mode 100644 index 0000000000..0fcd433ed5 --- /dev/null +++ b/config/funding.yaml @@ -0,0 +1,38 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + + # if you want to route channel by symbol + symbolChannels: + "^BTC": "btc" + "^ETH": "eth" + + # object routing rules + routing: + trade: "$symbol" + order: "$symbol" + submitOrder: "$session" # not supported yet + pnL: "bbgo-pnl" + +sessions: + binance: + exchange: binance + envVarPrefix: binance + futures: true + +exchangeStrategies: +- on: binance + funding: + symbol: ETHUSDT + quantity: 0.0001 + fundingRate: + high: 0.01% + supportDetection: + - interval: 1m + movingAverageType: EMA + movingAverageIntervalWindow: + interval: 15m + window: 60 + minVolume: 8_000 diff --git a/config/grid-usdttwd.yaml b/config/grid-usdttwd.yaml index 39b906489a..29eb06c8a8 100644 --- a/config/grid-usdttwd.yaml +++ b/config/grid-usdttwd.yaml @@ -17,34 +17,34 @@ riskControls: # basic risk control order executor basic: minQuoteBalance: 100.0 # keep 100 twd - maxBaseAssetBalance: 1000.0 - minBaseAssetBalance: 200.0 + maxBaseAssetBalance: 100_000.0 + minBaseAssetBalance: 1_000.0 maxOrderAmount: 2000.0 # 1000 twd backtest: # for testing max draw down (MDD) at 03-12 # see here for more details # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp - startTime: "2020-09-04" - endTime: "2020-09-14" + startTime: "2021-01-01" + endTime: "2021-01-30" symbols: - - BTCUSDT - - ETHUSDT - USDTTWD account: - makerCommission: 15 - takerCommission: 15 - balances: - BTC: 0.0 - USDT: 10000.0 + max: + makerFeeRate: 15 + takerFeeRate: 15 + balances: + BTC: 0.0 + USDT: 10_000.0 + TWD: 100_000.0 exchangeStrategies: - on: max grid: symbol: USDTTWD quantity: 10.0 # 10 USDT per grid - 
gridNumber: 50 # 50 GRID, Total Amount will be 10 USDT * 50 GRID = 500 USDT + gridNumber: 100 # 50 GRID, Total Amount will be 10 USDT * 50 GRID = 500 USDT profitSpread: 0.1 # When buying USDT at 28.1, we will sell it at 28.1 + 0.1 = 28.2, When selling USDT at 28.1, we will buy it back at 28.1 - 0.1 = 28.0 - upperPrice: 28.50 + upperPrice: 28.90 lowerPrice: 27.90 long: true # long mode means we don't keep cash when we sell usdt, we will use the same amount of twd to buy more usdt back diff --git a/config/grid.yaml b/config/grid.yaml index 64f566815b..4704c811b8 100644 --- a/config/grid.yaml +++ b/config/grid.yaml @@ -4,9 +4,9 @@ sessions: exchange: binance envVarPrefix: binance - max: - exchange: max - envVarPrefix: max + #max: + # exchange: max + # envVarPrefix: max riskControls: # This is the session-based risk controller, which let you configure different risk controller by session. @@ -25,28 +25,37 @@ riskControls: minBaseAssetBalance: 0.0 maxOrderAmount: 1000.0 +# example command: +# godotenv -f .env.local -- go run ./cmd/bbgo backtest --sync-from 2020-11-01 --config config/grid.yaml --base-asset-baseline backtest: # for testing max draw down (MDD) at 03-12 # see here for more details # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp - startTime: "2020-09-04" - endTime: "2020-09-14" + startTime: "2022-05-09" + endTime: "2022-05-20" symbols: - - BTCUSDT - - ETHUSDT - account: - makerCommission: 15 - takerCommission: 15 - balances: - BTC: 0.0 - USDT: 10000.0 + - BTCUSDT + sessions: [binance] + accounts: + binance: + balances: + BTC: 0.0 + USDT: 10000.0 exchangeStrategies: -- on: max + +- on: binance grid: symbol: BTCUSDT quantity: 0.001 - gridNumber: 30 - profitSpread: 50.0 - upperPrice: 26800.0 - lowerPrice: 26500.0 + # scaleQuantity: + # byPrice: + # exp: + # domain: [20_000, 30_000] + # range: [0.2, 0.001] + gridNumber: 20 + profitSpread: 1000.0 # The profit price spread that you want to add to your sell order when your buy order is executed + upperPrice: 30_000.0 + lowerPrice: 28_000.0 + # long: true # The sell order is submitted in the same order amount as the filled corresponding buy order, rather than the same quantity. + diff --git a/config/infinity-grid.yaml b/config/infinity-grid.yaml new file mode 100644 index 0000000000..7a73b8cf6a --- /dev/null +++ b/config/infinity-grid.yaml @@ -0,0 +1,69 @@ +--- +sessions: + binance: + exchange: binance + envVarPrefix: binance + + #max: + # exchange: max + # envVarPrefix: max + +riskControls: + # This is the session-based risk controller, which let you configure different risk controller by session. 
+ sessionBased: + # "max" is the session name that you want to configure the risk control + max: + # orderExecutor is one of the risk control + orderExecutor: + # symbol-routed order executor + bySymbol: + ETHUSDT: + # basic risk control order executor + basic: + minQuoteBalance: 10.0 + maxBaseAssetBalance: 3.0 + minBaseAssetBalance: 0.0 + maxOrderAmount: 1000.0 + +# example command: +# godotenv -f .env.local -- go run ./cmd/bbgo backtest --sync-from 2020-11-01 --config config/grid.yaml --base-asset-baseline +backtest: + startTime: "2022-06-01" + endTime: "2022-06-17" + symbols: + - ETHUSDT + sessions: [binance] + accounts: + binance: + balances: + ETH: 0 + USDT: 5000.0 + +exchangeStrategies: + +#- on: binance + #grid: + #symbol: BTCUSDT + ##quantity: 0.001 + ##amount: 50 + #quantityScale: + #byPrice: + #exp: + #domain: [20_000, 70_000] + #range: [0.001, 0.0007] + #gridNumber: 33 # 33: spread = 1785 + #profitSpread: 2000.0 # The profit price spread that you want to add to your sell order when your buy order is executed + #upperPrice: 70_000.0 + #lowerPrice: 20_000.0 + #long: true # The sell order is submitted in the same order amount as the filled corresponding buy order, rather than the same quantity. +- on: binance + infinity-grid: + symbol: ETHUSDT + interval: 1m + quantity: 0.05 + initialOrderQuantity: 1 + gridNumber: 10 + countOfMoreOrders: 3 # +2 + margin: 0.030 + lowerPrice: 700 + long: true diff --git a/config/marketcap.yaml b/config/marketcap.yaml new file mode 100644 index 0000000000..47bc14796a --- /dev/null +++ b/config/marketcap.yaml @@ -0,0 +1,20 @@ +--- +notifications: + slack: + defaultChannel: "bbgo" + errorChannel: "bbgo-error" + +exchangeStrategies: + - on: max + marketcap: + interval: 1m + baseCurrency: TWD + baseWeight: 2% + targetCurrencies: + - BTC + - ETH + - MATIC + threshold: 2% + # max amount to buy or sell per order + maxAmount: 1_000 + dryRun: true diff --git a/config/max-margin.yaml b/config/max-margin.yaml new file mode 100644 index 0000000000..ad7cc7588c --- /dev/null +++ b/config/max-margin.yaml @@ -0,0 +1,35 @@ +--- +sessions: + max_margin: + exchange: max + margin: true + +sync: + # userDataStream is used to sync the trading data in real-time + # it uses the websocket connection to insert the trades + userDataStream: + trades: false + filledOrders: false + + # since is the start date of your trading data + since: 2019-11-01 + + # sessions is the list of session names you want to sync + # by default, BBGO sync all your available sessions. + sessions: + - max_margin + + # symbols is the list of symbols you want to sync + # by default, BBGO try to guess your symbols by your existing account balances. 
+ symbols: + - BTCUSDT + - ETHUSDT + + +exchangeStrategies: + +- on: max_margin + pricealert: + symbol: LINKUSDT + interval: 1m + diff --git a/config/optimizer.yaml b/config/optimizer.yaml new file mode 100644 index 0000000000..ad3621c1d3 --- /dev/null +++ b/config/optimizer.yaml @@ -0,0 +1,26 @@ +# usage: +# +# go run ./cmd/bbgo optimize --config bollmaker_ethusdt.yaml --optimizer-config optimizer.yaml --debug +# +--- +executor: + type: local + local: + maxNumberOfProcesses: 10 + +matrix: +- type: iterate + path: '/exchangeStrategies/0/bollmaker/interval' + values: ["1m", "5m"] + +- type: range + path: '/exchangeStrategies/0/bollmaker/amount' + min: 20.0 + max: 40.0 + step: 20.0 + +- type: range + path: '/exchangeStrategies/0/bollmaker/spread' + min: 0.1% + max: 0.2% + step: 0.02% diff --git a/config/pivotshort-ETHUSDT.yaml b/config/pivotshort-ETHUSDT.yaml new file mode 100644 index 0000000000..b533e8e3ad --- /dev/null +++ b/config/pivotshort-ETHUSDT.yaml @@ -0,0 +1,98 @@ +--- +sessions: + binance: + exchange: binance + envVarPrefix: binance + margin: true + # isolatedMargin: true + # isolatedMarginSymbol: ETHUSDT + +exchangeStrategies: +- on: binance + pivotshort: + symbol: ETHUSDT + + # interval is the main pivot interval + interval: 5m + + # window is the main pivot window + window: 200 + + # breakLow settings are used for shorting when the current price break the previous low + breakLow: + # ratio is how much the price breaks the previous low to trigger the short. + ratio: 0.1% + + # quantity is used for submitting the sell order + # if quantity is not set, all base balance will be used for selling the short. + quantity: 10.0 + + # marketOrder submits the market sell order when the closed price is lower than the previous pivot low. + marketOrder: true + + # bounceRatio is used for calculating the price of the limit sell order. + # it's ratio of pivot low bounce when a new pivot low is detected. + # Sometimes when the price breaks the previous low, the price might be pulled back to a higher price. + # The bounceRatio is useful for such case, however, you might also miss the chance to short at the price if there is no pull back. + # Notice: When marketOrder is set, bounceRatio will not be used. + # bounceRatio: 0.1% + + # stopEMARange is the price range we allow short. + # Short-allowed price range = [current price] > [EMA] * (1 - [stopEMARange]) + stopEMARange: 0% + stopEMA: + interval: 1h + window: 99 + + bounceShort: + enabled: false + interval: 1h + window: 10 + quantity: 10.0 + minDistance: 3% + # stopLossPercentage: 1% + + # ratio is the ratio of the resistance price, + # higher the ratio, lower the price + # first_layer_price = resistance_price * (1 - ratio) + # second_layer_price = (resistance_price * (1 - ratio)) * (2 * layerSpread) + ratio: 0% + numOfLayers: 1 + layerSpread: 0.1% + + exit: + # roiStopLossPercentage is the stop loss percentage of the position ROI (currently the price change) + roiStopLossPercentage: 2% + + # roiTakeProfitPercentage is used to force taking profit by percentage of the position ROI (currently the price change) + # force to take the profit ROI exceeded the percentage. 
+ roiTakeProfitPercentage: 30% + + # roiMinTakeProfitPercentage applies to lowerShadowRatio and cumulatedVolume exit options + roiMinTakeProfitPercentage: 10% + + # lowerShadowRatio is used to taking profit when the (lower shadow height / low price) > lowerShadowRatio + # you can grab a simple stats by the following SQL: + # SELECT ((close - low) / close) AS shadow_ratio FROM binance_klines WHERE symbol = 'ETHUSDT' AND `interval` = '5m' AND start_time > '2022-01-01' ORDER BY shadow_ratio DESC LIMIT 20; + lowerShadowRatio: 3% + + # cumulatedVolume is used to take profit when the cumulated quote volume from the klines exceeded a threshold + cumulatedVolume: + enabled: true + minQuoteVolume: 90_000_000 + window: 5 + + marginOrderSideEffect: repay + +backtest: + sessions: + - binance + startTime: "2022-04-01" + endTime: "2022-06-08" + symbols: + - ETHUSDT + accounts: + binance: + balances: + ETH: 10.0 + USDT: 3000.0 diff --git a/config/pivotshort-GMTBUSD.yaml b/config/pivotshort-GMTBUSD.yaml new file mode 100644 index 0000000000..0e3fa77245 --- /dev/null +++ b/config/pivotshort-GMTBUSD.yaml @@ -0,0 +1,43 @@ +--- +sessions: + binance: + exchange: binance + envVarPrefix: binance + margin: true + isolatedMargin: true + isolatedMarginSymbol: GMTBUSD + # futures: true + +exchangeStrategies: +- on: binance + pivotshort: + symbol: GMTBUSD + interval: 5m + window: 120 + + entry: + immediate: true + catBounceRatio: 1% + quantity: 20 + numLayers: 3 + marginOrderSideEffect: borrow + + exit: + takeProfitPercentage: 13% + stopLossPercentage: 0.5% + shadowTakeProfitRatio: 3% + marginOrderSideEffect: repay + + +backtest: + sessions: + - binance + startTime: "2022-05-25" + endTime: "2022-06-03" + symbols: + - GMTBUSD + accounts: + binance: + balances: + GMT: 3_000.0 + USDT: 3_000.0 diff --git a/config/pivotshort.yaml b/config/pivotshort.yaml new file mode 100644 index 0000000000..ec80ca3512 --- /dev/null +++ b/config/pivotshort.yaml @@ -0,0 +1,64 @@ +--- +sessions: + binance: + exchange: binance + envVarPrefix: binance +# margin: true +# isolatedMargin: true +# isolatedMarginSymbol: GMTUSDT +# futures: true + +exchangeStrategies: +- on: binance + pivotshort: + symbol: GMTUSDT + interval: 5m + + window: 120 + + # breakLow settings are used for shorting when the current price break the previous low + breakLow: + ratio: 0.1% + quantity: 10.0 + stopEMARange: 5% + stopEMA: + interval: 1h + window: 99 + + exit: + # roiStopLossPercentage is the stop loss percentage of the position ROI (currently the price change) + roiStopLossPercentage: 1% + + # roiTakeProfitPercentage is the take profit percentage of the position ROI (currently the price change) + # force to take the profit ROI exceeded the percentage. 
+ roiTakeProfitPercentage: 25% + + # roiMinTakeProfitPercentage applies to lowerShadowRatio and cumulatedVolume exit options + roiMinTakeProfitPercentage: 10% + + # lowerShadowRatio is used to force taking profit when the (lower shadow height / low price) > lowerShadowRatio + # you can grab a simple stats by the following SQL: + # SELECT ((close - low) / close) AS shadow_ratio FROM binance_klines WHERE symbol = 'ETHUSDT' AND `interval` = '5m' AND start_time > '2022-01-01' ORDER BY shadow_ratio DESC LIMIT 20; + lowerShadowRatio: 3% + + # cumulatedVolume is used to take profit when the cumulated quote volume from the klines exceeded a threshold + cumulatedVolume: + enabled: false + minQuoteVolume: 90_000_000 + window: 5 + + marginOrderSideEffect: repay + + +backtest: + sessions: + - binance + startTime: "2022-05-01" + endTime: "2022-06-03" + symbols: + - GMTUSDT + accounts: + binance: + balances: + GMT: 3010.0 + USDT: 1000.0 diff --git a/config/pivotshort_optimizer.yaml b/config/pivotshort_optimizer.yaml new file mode 100644 index 0000000000..c327088266 --- /dev/null +++ b/config/pivotshort_optimizer.yaml @@ -0,0 +1,52 @@ +# usage: +# +# go run ./cmd/bbgo optimize --config config/pivotshort-ETHUSDT.yaml --optimizer-config config/pivotshort_optimizer.yaml --debug +# +--- +executor: + type: local + local: + maxNumberOfProcesses: 10 + +matrix: + +- type: iterate + label: interval + path: '/exchangeStrategies/0/pivotshort/interval' + values: [ "1m", "5m", "30m" ] + +- type: range + path: '/exchangeStrategies/0/pivotshort/window' + label: window + min: 100.0 + max: 200.0 + step: 20.0 + +# - type: range +# path: '/exchangeStrategies/0/pivotshort/breakLow/stopEMARange' +# label: stopEMARange +# min: 0% +# max: 10% +# step: 1% + +# - type: range +# path: '/exchangeStrategies/0/pivotshort/exit/roiStopLossPercentage' +# label: roiStopLossPercentage +# min: 0.5% +# max: 2% +# step: 0.5% +# +# - type: range +# path: '/exchangeStrategies/0/pivotshort/exit/roiTakeProfitPercentage' +# label: roiTakeProfitPercentage +# min: 10% +# max: 50% +# step: 5% +# +# - type: range +# path: '/exchangeStrategies/0/pivotshort/exit/roiMinTakeProfitPercentage' +# label: roiMinTakeProfitPercentage +# min: 3% +# max: 10% +# step: 1% +# diff --git a/config/pricealert-tg.yaml b/config/pricealert-tg.yaml new file mode 100644 index 0000000000..2021160d3f --- /dev/null +++ b/config/pricealert-tg.yaml @@ -0,0 +1,12 @@ +--- +sessions: + binance: + exchange: binance + envVarPrefix: binance + +exchangeStrategies: +- on: binance + pricealert: + symbol: "BTCUSDT" + interval: "1m" + minChange: 300 \ No newline at end of file diff --git a/config/buyandhold.yaml b/config/pricedrop.yaml similarity index 85% rename from config/buyandhold.yaml rename to config/pricedrop.yaml index d3057a1c90..e650a981cf 100644 --- a/config/buyandhold.yaml +++ b/config/pricedrop.yaml @@ -40,22 +40,21 @@ backtest: # for testing max draw down (MDD) at 03-12 # see here for more details # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp - startTime: "2020-01-01" - endTime: "2020-01-15" + startTime: "2022-01-01" + endTime: "2022-01-15" symbols: - BTCUSDT account: - makerCommission: 15 - takerCommission: 15 - buyerCommission: 0 - sellerCommission: 0 - balances: - BTC: 0.1 - USDT: 10000.0 + binance: + makerFeeRate: 15 + takerFeeRate: 15 + balances: + BTC: 0.1 + USDT: 10000.0 exchangeStrategies: - on: binance - buyandhold: + pricedrop: symbol: "BTCUSDT" interval: "1m" baseQuantity: 0.001 diff --git a/config/rebalance.yaml b/config/rebalance.yaml new 
file mode 100644 index 0000000000..6780da1d3d --- /dev/null +++ b/config/rebalance.yaml @@ -0,0 +1,21 @@ +--- +notifications: + slack: + defaultChannel: "bbgo" + errorChannel: "bbgo-error" + +exchangeStrategies: + - on: max + rebalance: + interval: 1d + baseCurrency: TWD + targetWeights: + BTC: 40% + ETH: 20% + MAX: 10% + USDT: 15% + TWD: 15% + threshold: 2% + # max amount to buy or sell per order + maxAmount: 10_000 + dryRun: false diff --git a/config/rsmaker.yaml b/config/rsmaker.yaml new file mode 100644 index 0000000000..a19941d18e --- /dev/null +++ b/config/rsmaker.yaml @@ -0,0 +1,101 @@ +--- +persistence: + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +sync: + # userDataStream is used to sync the trading data in real-time + # it uses the websocket connection to insert the trades + userDataStream: + trades: true + filledOrders: true + + # since is the start date of your trading data + since: 2021-08-01 + + # sessions is the list of session names you want to sync + # by default, BBGO sync all your available sessions. + sessions: + - binance + + # symbols is the list of symbols you want to sync + # by default, BBGO try to guess your symbols by your existing account balances. + symbols: + - NEARBUSD + - BTCUSDT + - ETHUSDT + - LINKUSDT + - BNBUSDT + - DOTUSDT + - DOTBUSD + + +sessions: + binance: + exchange: binance + envVarPrefix: binance +# futures: true + + +exchangeStrategies: +- on: binance + rsmaker: + symbol: BTCBUSD + interval: 1m +# quantity: 40 + amount: 20 + minProfitSpread: 0.1% + +# uptrendSkew: 0.7 + + # downtrendSkew, like the strongDowntrendSkew, but the price is still in the default band. +# downtrendSkew: 1.3 + + # tradeInBand: when tradeInBand is set, you will only place orders in the bollinger band. +# tradeInBand: true + + # buyBelowNeutralSMA: when this set, it will only place buy order when the current price is below the SMA line. +# buyBelowNeutralSMA: true + + defaultBollinger: + interval: "1h" + window: 21 + bandWidth: 2.0 + + # neutralBollinger is the smaller range of the bollinger band + # If price is in this band, it usually means the price is oscillating. + neutralBollinger: + interval: "5m" + window: 21 + bandWidth: 2.0 + + dynamicExposurePositionScale: + byPercentage: + # exp means we want to use exponential scale, you can replace "exp" with "linear" for linear scale + exp: + # from lower band -100% (-1) to upper band 100% (+1) + domain: [ -2, 2 ] + # when in down band, holds 1.0 by maximum + # when in up band, holds 0.05 by maximum + range: [ 1, 0.01 ] + + + +backtest: + sessions: + - binance + # for testing max draw down (MDD) at 03-12 + # see here for more details + # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp + startTime: "2022-03-26" + endTime: "2022-04-12" + symbols: + - BTCBUSD + account: + binance: + makerFeeRate: 0.0 + balances: + BTC: 1 + BUSD: 45_000.0 diff --git a/config/schedule-btcusdt.yaml b/config/schedule-btcusdt.yaml new file mode 100644 index 0000000000..5ab8ce2aa1 --- /dev/null +++ b/config/schedule-btcusdt.yaml @@ -0,0 +1,30 @@ +--- +riskControls: + # This is the session-based risk controller, which let you configure different risk controller by session. 
+ sessionBased: + # "max" is the session name that you want to configure the risk control + max: + # orderExecutor is one of the risk control + orderExecutor: + # symbol-routed order executor + bySymbol: + BTCUSDT: + # basic risk control order executor + basic: + minQuoteBalance: 1000.0 + maxBaseAssetBalance: 500.0 + minBaseAssetBalance: 300.0 + maxOrderAmount: 1000.0 + +exchangeStrategies: + +- on: max + schedule: + interval: 1h + symbol: BTCUSDT + side: buy + quantity: 0.001 + belowMovingAverage: + type: EWMA + interval: 1h + window: 99 diff --git a/config/schedule-ethusdt.yaml b/config/schedule-ethusdt.yaml new file mode 100644 index 0000000000..ae40ba68e0 --- /dev/null +++ b/config/schedule-ethusdt.yaml @@ -0,0 +1,83 @@ +--- + +sessions: + binance: + exchange: binance + envVarPrefix: binance + +# time godotenv -f .env.local -- go run ./cmd/bbgo backtest --base-asset-baseline --config config/schedule-ethusdt.yaml -v +backtest: + startTime: "2021-08-01" + endTime: "2021-08-07" + symbols: + - ETHUSDT + account: + binance: + balances: + ETH: 1.0 + USDT: 20_000.0 + +riskControls: + # This is the session-based risk controller, which let you configure different risk controller by session. + sessionBased: + # "max" is the session name that you want to configure the risk control + max: + # orderExecutor is one of the risk control + orderExecutor: + # symbol-routed order executor + bySymbol: + ETHUSDT: + # basic risk control order executor + basic: + minQuoteBalance: 1000.0 + maxBaseAssetBalance: 500.0 + minBaseAssetBalance: 300.0 + maxOrderAmount: 1000.0 + +exchangeStrategies: + +- on: binance + schedule: + # trigger schedule per hour + # valid intervals are: 1m, 5m, 15m, 30m, 1h, 2h, 4h, 6h, 12h, 1d + interval: 1h + + symbol: ETHUSDT + side: buy + + # quantity is the quantity of the crypto (in base currency) you want to buy/sell + # quantity: 0.01 + + # amount is the quote quantity of the crypto (in quote currency), here is USDT. + # 11.0 means you want to buy ETH with 11 USDT. + # the quantity will be calculated automatically, according to the latest price + amount: 11.0 + + # belowMovingAverage is a special override (optional) + # execute order only when the closed price is below the moving average line. + # you can open the app to adjust your parameters here. + # the interval here could be different from the triggering interval. + belowMovingAverage: + type: EWMA + interval: 1h + window: 99 + + # you can override the default side + side: buy + + # you can choose one of quantity or amount + # quantity: 0.05 + # amount is how much quote balance you want to buy + # here 11.0 means you want to buy ETH with 11.0 USDT + # please note that crypto exchange requires you to submit an order above the min notional limit $10 usdt + amount: 11.0 + + # aboveMovingAverage is a special override (optional) + # aboveMovingAverage: + # type: EWMA + # interval: 1h + # window: 99 + # side: sell + # # quantity: 0.05 + # amount: 11.0 + diff --git a/config/schedule.yaml b/config/schedule.yaml new file mode 100644 index 0000000000..fa231846e7 --- /dev/null +++ b/config/schedule.yaml @@ -0,0 +1,39 @@ +--- +riskControls: + # This is the session-based risk controller, which let you configure different risk controller by session. 
+ sessionBased: + # "max" is the session name that you want to configure the risk control + max: + # orderExecutor is one of the risk control + orderExecutor: + # symbol-routed order executor + bySymbol: + USDTTWD: + # basic risk control order executor + basic: + minQuoteBalance: 100.0 + maxBaseAssetBalance: 30_000.0 + minBaseAssetBalance: 0.0 + maxOrderAmount: 1_000.0 + +exchangeStrategies: + +- on: max + schedule: + interval: 1m + symbol: USDTTWD + side: buy + quantity: 10 + + aboveMovingAverage: + type: EWMA + interval: 1h + window: 99 + side: sell + + belowMovingAverage: + type: EWMA + interval: 1h + window: 99 + side: buy + diff --git a/config/skeleton.yaml b/config/skeleton.yaml new file mode 100644 index 0000000000..d801df353f --- /dev/null +++ b/config/skeleton.yaml @@ -0,0 +1,24 @@ +--- +sessions: + binance: + exchange: binance + heikinAshi: true + envVarPrefix: binance + +exchangeStrategies: + +- on: binance + skeleton: + symbol: BNBBUSD + +backtest: + startTime: "2022-06-14" + endTime: "2022-06-15" + symbols: + - BNBBUSD + sessions: [binance] + account: + binance: + balances: + BNB: 0 + BUSD: 10000 diff --git a/config/supertrend.yaml b/config/supertrend.yaml new file mode 100644 index 0000000000..db880bce23 --- /dev/null +++ b/config/supertrend.yaml @@ -0,0 +1,62 @@ +--- +persistence: + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +sessions: + binance: + exchange: binance + envVarPrefix: binance + margin: true + isolatedMargin: true + isolatedMarginSymbol: BTCUSDT + +backtest: + sessions: [binance] + # for testing max draw down (MDD) at 03-12 + # see here for more details + # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp + startTime: "2022-04-01" + endTime: "2022-04-30" + symbols: + - BTCUSDT + accounts: + binance: + makerCommission: 10 # 0.15% + takerCommission: 15 # 0.15% + balances: + BTC: 1.0 + USDT: 10000.0 + +exchangeStrategies: +- on: binance + supertrend: + symbol: BTCUSDT + + # interval is how long do you want to update your order price and quantity + interval: 1h + + # leverage is the leverage of the orders + leverage: 1.0 + + # fastDEMAWindow and slowDEMAWindow are for filtering super trend noise + fastDEMAWindow: 144 + slowDEMAWindow: 169 + + # Supertrend indicator parameters + superTrend: + # ATR window used by Supertrend + averageTrueRangeWindow: 39 + # ATR Multiplier for calculating super trend prices, the higher, the stronger the trends are + averageTrueRangeMultiplier: 3 + + # TP according to ATR multiple, 0 to disable this + takeProfitMultiplier: 3 + + # Set SL price to the low of the triggering Kline + stopLossByTriggeringK: true + + # TP/SL by reversed signals + tpslBySignal: true diff --git a/config/support-margin.yaml b/config/support-margin.yaml new file mode 100644 index 0000000000..bc28cea49f --- /dev/null +++ b/config/support-margin.yaml @@ -0,0 +1,78 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + + # if you want to route channel by symbol + symbolChannels: + "^BTC": "btc" + "^ETH": "eth" + "^BNB": "bnb" + + # object routing rules + routing: + trade: "$symbol" + order: "$symbol" + submitOrder: "$session" # not supported yet + pnL: "bbgo-pnl" + +sessions: + binance_margin_linkusdt: + exchange: binance + margin: true + isolatedMargin: true + isolatedMarginSymbol: LINKUSDT + +riskControls: + # This is the session-based risk controller, which let you configure different risk controller by session. 
+ sessionBased: + max: + orderExecutor: + bySymbol: + BTCUSDT: + basic: + minQuoteBalance: 100.0 + maxBaseAssetBalance: 3.0 + minBaseAssetBalance: 0.0 + maxOrderAmount: 1000.0 + +backtest: + # for testing max draw down (MDD) at 03-12 + # see here for more details + # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp + startTime: "2020-09-04" + endTime: "2020-09-14" + symbols: + - LINKUSDT + account: + binance: + makerFeeRate: 15 + takerFeeRate: 15 + balances: + LINK: 0.0 + USDT: 10000.0 + +exchangeStrategies: + +- on: binance_margin_linkusdt + support: + symbol: LINKUSDT + interval: 1m + minVolume: 2_000 + marginOrderSideEffect: borrow + + scaleQuantity: + byVolume: + exp: + domain: [ 1_000, 200_000 ] + range: [ 3.0, 5.0 ] + + maxBaseAssetBalance: 1000.0 + minQuoteAssetBalance: 2000.0 + + targets: + - profitPercentage: 0.02 + quantityPercentage: 0.5 + marginOrderSideEffect: repay + diff --git a/config/support.yaml b/config/support.yaml new file mode 100644 index 0000000000..3f45bb2b27 --- /dev/null +++ b/config/support.yaml @@ -0,0 +1,57 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + # object routing rules + routing: + trade: "$symbol" + order: "$symbol" + submitOrder: "$session" # not supported yet + +sessions: + binance: + exchange: binance + +backtest: + startTime: "2021-09-01" + endTime: "2021-09-30" + sessions: + - binance + symbols: + - LINKUSDT + account: + binance: + balances: + USDT: 10000.0 + +exchangeStrategies: + +- on: binance + support: + symbol: LINKUSDT + interval: 5m + minVolume: 80_000 + triggerMovingAverage: + interval: 5m + window: 99 + longTermMovingAverage: + interval: 1h + window: 99 + + scaleQuantity: + byVolume: + exp: + domain: [ 10_000, 200_000 ] + range: [ 0.5, 1.0 ] + + maxBaseAssetBalance: 1000.0 + minQuoteAssetBalance: 2000.0 + + trailingStopTarget: + callbackRatio: 1.5% + minimumProfitPercentage: 2% + + targets: + - profitPercentage: 0.02 + quantityPercentage: 0.5 diff --git a/config/sync.yaml b/config/sync.yaml new file mode 100644 index 0000000000..3a24c9844d --- /dev/null +++ b/config/sync.yaml @@ -0,0 +1,64 @@ +--- +sessions: + binance: + exchange: binance + envVarPrefix: binance + + binance_margin_dotusdt: + exchange: binance + envVarPrefix: binance + margin: true + isolatedMargin: true + isolatedMarginSymbol: DOTUSDT + + max: + exchange: max + envVarPrefix: max + + kucoin: + exchange: kucoin + envVarPrefix: kucoin + + okex: + exchange: okex + envVarPrefix: okex + +sync: + # userDataStream is used to sync the trading data in real-time + # it uses the websocket connection to insert the trades + userDataStream: + trades: true + filledOrders: true + + # since is the start date of your trading data + since: 2019-01-01 + + # sessions is the list of session names you want to sync + # by default, BBGO sync all your available sessions. + sessions: + - binance + - binance_margin_dotusdt + - max + - okex + - kucoin + + # symbols is the list of symbols you want to sync + # by default, BBGO try to guess your symbols by your existing account balances. + symbols: + - BTCUSDT + - ETHUSDT + - DOTUSDT + - binance:BNBUSDT + - max:USDTTWD + + # marginHistory enables the margin history sync + marginHistory: true + + # marginAssets lists the assets that are used in the margin. 
+ # including loan, repay, interest and liquidation + marginAssets: + - USDT + + depositHistory: true + rewardHistory: true + withdrawHistory: true diff --git a/config/wall.yaml b/config/wall.yaml new file mode 100644 index 0000000000..6280882812 --- /dev/null +++ b/config/wall.yaml @@ -0,0 +1,38 @@ +--- +persistence: + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +sessions: + max: + exchange: max + envVarPrefix: MAX + +exchangeStrategies: + +- on: max + wall: + symbol: DOTUSDT + + # interval is how long do you want to update your order price and quantity + interval: 1m + + fixedPrice: 2.0 + + side: buy + + # quantity is the base order quantity for your buy/sell order. + # quantity: 0.05 + + numLayers: 3 + layerSpread: 0.1 + + quantityScale: + byLayer: + linear: + domain: [ 1, 3 ] + range: [ 10.0, 30.0 ] + + diff --git a/config/xbalance.yaml b/config/xbalance.yaml new file mode 100644 index 0000000000..a437fc2ec9 --- /dev/null +++ b/config/xbalance.yaml @@ -0,0 +1,51 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + + # if you want to route channel by symbol + symbolChannels: + "^BTC": "btc" + "^ETH": "eth" + + # if you want to route channel by exchange session + sessionChannels: + max: "bbgo-max" + binance: "bbgo-binance" + + # routing rules + routing: + trade: "$symbol" + order: "$slient" + submitOrder: "$slient" + pnL: "bbgo-pnl" + +sessions: + max: + exchange: max + envVarPrefix: max + + binance: + exchange: binance + envVarPrefix: binance + +#persistence: +# json: +# directory: var/data +# redis: +# host: 127.0.0.1 +# port: 6379 +# db: 0 + +crossExchangeStrategies: + +- xbalance: + interval: 1h + asset: USDT + addresses: + binance: your_whitelisted_address + max: your_whitelisted_address + low: 5000 + middle: 6000 + diff --git a/config/xgap.yaml b/config/xgap.yaml new file mode 100644 index 0000000000..89d7329869 --- /dev/null +++ b/config/xgap.yaml @@ -0,0 +1,53 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + + # if you want to route channel by symbol + symbolChannels: + "^BTC": "btc" + "^ETH": "eth" + + # if you want to route channel by exchange session + sessionChannels: + max: "bbgo-max" + binance: "bbgo-binance" + + # routing rules + routing: + trade: "$silent" + order: "$silent" + submitOrder: "$silent" + pnL: "bbgo-pnl" + +persistence: + json: + directory: var/data + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +sessions: + max: + exchange: max + envVarPrefix: max + + binance: + exchange: binance + envVarPrefix: binance + publicOnly: true + +crossExchangeStrategies: + +- xgap: + symbol: "ETHUSDT" + sourceExchange: binance + tradingExchange: max + updateInterval: 1m + dailyMaxVolume: 100 + dailyFeeBudgets: + MAX: 100 + persistence: + type: redis diff --git a/config/xmaker-btcusdt.yaml b/config/xmaker-btcusdt.yaml new file mode 100644 index 0000000000..6beb4bd52b --- /dev/null +++ b/config/xmaker-btcusdt.yaml @@ -0,0 +1,101 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + + # if you want to route channel by symbol + symbolChannels: + "^BTC": "btc" + "^ETH": "eth" + + # if you want to route channel by exchange session + sessionChannels: + max: "bbgo-max" + binance: "bbgo-binance" + + # routing rules + routing: + trade: "$symbol" + order: "$silent" + submitOrder: "$silent" + pnL: "bbgo-pnl" + +reportPnL: +- averageCostBySymbols: + - "BTCUSDT" + - "BNBUSDT" + of: binance + when: + - "@daily" + - "@hourly" + +persistence: + json: + directory: 
var/data + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +sessions: + max: + exchange: max + envVarPrefix: MAX + + binance: + exchange: binance + envVarPrefix: BINANCE + +riskControls: + # This is the session-based risk controller, which let you configure different risk controller by session. + sessionBased: + # "max" is the session name that you want to configure the risk control + max: + # orderExecutor is one of the risk control + orderExecutor: + # symbol-routed order executor + bySymbol: + BTCUSDT: + # basic risk control order executor + basic: + # keep at least X USDT (keep cash) + minQuoteBalance: 100.0 + + # maximum BTC balance (don't buy too much) + maxBaseAssetBalance: 1.0 + + # minimum BTC balance (don't sell too much) + minBaseAssetBalance: 0.01 + + maxOrderAmount: 1000.0 + +crossExchangeStrategies: + +- xmaker: + symbol: BTCUSDT + sourceExchange: binance + makerExchange: max + updateInterval: 1s + + # disableHedge disables the hedge orders on the source exchange + # disableHedge: true + + hedgeInterval: 10s + + margin: 0.004 + askMargin: 0.004 + bidMargin: 0.004 + + quantity: 0.001 + quantityMultiplier: 2 + + # numLayers means how many order we want to place on each side. 3 means we want 3 bid orders and 3 ask orders + numLayers: 1 + # pips is the fraction numbers between each order. for BTC, 1 pip is 0.1, + # 0.1 pip is 0.01, here we use 10, so we will get 18000.00, 18001.00 and + # 18002.00 + pips: 10 + persistence: + type: redis + diff --git a/config/xmaker-ethusdt.yaml b/config/xmaker-ethusdt.yaml new file mode 100644 index 0000000000..b7faef1f4e --- /dev/null +++ b/config/xmaker-ethusdt.yaml @@ -0,0 +1,94 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + + # if you want to route channel by symbol + symbolChannels: + "^BTC": "btc" + "^ETH": "eth" + + # if you want to route channel by exchange session + sessionChannels: + max: "bbgo-max" + binance: "bbgo-binance" + + # routing rules + routing: + trade: "$symbol" + order: "$silent" + submitOrder: "$silent" + pnL: "bbgo-pnl" + +persistence: + json: + directory: var/data + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +sessions: + max: + exchange: max + envVarPrefix: max + + binance: + exchange: binance + envVarPrefix: binance + +riskControls: + # This is the session-based risk controller, which let you configure different risk controller by session. + sessionBased: + # "max" is the session name that you want to configure the risk control + max: + # orderExecutor is one of the risk control + orderExecutor: + # symbol-routed order executor + bySymbol: + ETHUSDT: + # basic risk control order executor + basic: + # keep at least X USDT (keep cash) + minQuoteBalance: 100.0 + + # maximum ETH balance (don't buy too much) + maxBaseAssetBalance: 10.0 + + # minimum ETH balance (don't sell too much) + minBaseAssetBalance: 0.0 + + maxOrderAmount: 1000.0 + +crossExchangeStrategies: + +- xmaker: + symbol: ETHUSDT + sourceExchange: binance + makerExchange: max + updateInterval: 2s + + # disableHedge disables the hedge orders on the source exchange + # disableHedge: true + + hedgeInterval: 10s + + margin: 0.004 + askMargin: 0.004 + bidMargin: 0.004 + + quantity: 0.01 + quantityMultiplier: 2 + + # numLayers means how many order we want to place on each side. 3 means we want 3 bid orders and 3 ask orders + numLayers: 2 + + # pips is the fraction numbers between each order. 
for BTC, 1 pip is 0.1, + # 0.1 pip is 0.01, here we use 10, so we will get 18000.00, 18001.00 and + # 18002.00 + pips: 10 + + persistence: + type: redis + diff --git a/config/xmaker.yaml b/config/xmaker.yaml new file mode 100644 index 0000000000..293b58247b --- /dev/null +++ b/config/xmaker.yaml @@ -0,0 +1,59 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + + # routing rules + routing: + trade: "$silent" + order: "$silent" + submitOrder: "$silent" + +persistence: + json: + directory: var/data + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +sessions: + max: + exchange: max + envVarPrefix: max + + binance: + exchange: binance + envVarPrefix: binance + +crossExchangeStrategies: + +- xmaker: + symbol: "BTCUSDT" + sourceExchange: binance + makerExchange: max + updateInterval: 1s + + # disableHedge disables the hedge orders on the source exchange + # disableHedge: true + + hedgeInterval: 10s + notifyTrade: true + + margin: 0.004 + askMargin: 0.4% + bidMargin: 0.4% + + quantity: 0.001 + quantityMultiplier: 2 + + # numLayers means how many order we want to place on each side. 3 means we want 3 bid orders and 3 ask orders + numLayers: 1 + # pips is the fraction numbers between each order. for BTC, 1 pip is 0.1, + # 0.1 pip is 0.01, here we use 10, so we will get 18000.00, 18001.00 and + # 18002.00 + pips: 10 + persistence: + type: redis + diff --git a/config/xnav.yaml b/config/xnav.yaml new file mode 100644 index 0000000000..e31a54b668 --- /dev/null +++ b/config/xnav.yaml @@ -0,0 +1,47 @@ +--- +notifications: + slack: + defaultChannel: "dev-bbgo" + errorChannel: "bbgo-error" + + # if you want to route channel by symbol + symbolChannels: + "^BTC": "btc" + "^ETH": "eth" + + # if you want to route channel by exchange session + sessionChannels: + max: "bbgo-max" + binance: "bbgo-binance" + + # routing rules + routing: + trade: "$symbol" + order: "$slient" + submitOrder: "$slient" + pnL: "bbgo-pnl" + +sessions: + max: + exchange: max + envVarPrefix: max + + binance: + exchange: binance + envVarPrefix: binance + +persistence: + json: + directory: var/data + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +crossExchangeStrategies: + +- xnav: + interval: 1h + reportOnStart: true + ignoreDusts: true + diff --git a/contracts/.eslintrc.js b/contracts/.eslintrc.js new file mode 100644 index 0000000000..11a03cb0ae --- /dev/null +++ b/contracts/.eslintrc.js @@ -0,0 +1,17 @@ +module.exports = { + parser: 'babel-eslint', + extends: 'standard', + env: { + node: true, + es6: true, + mocha: true + }, + rules: { + 'space-before-function-paren': ['error', 'never'] + }, + globals: { + contract: true, + web3: true, + assert: true + } +} diff --git a/contracts/.gitignore b/contracts/.gitignore new file mode 100644 index 0000000000..412a02cace --- /dev/null +++ b/contracts/.gitignore @@ -0,0 +1,3 @@ +/build +/node_modules +/cache diff --git a/contracts/.solhint.json b/contracts/.solhint.json new file mode 100644 index 0000000000..c481cc550b --- /dev/null +++ b/contracts/.solhint.json @@ -0,0 +1,8 @@ +{ + "extends": "solhint:recommended", + "plugins": [], + "rules": { + "compiler-version": ["error", ">=0.6.6"], + "reason-string": ["warn", {"maxLength": 64}] + } +} diff --git a/contracts/README.md b/contracts/README.md new file mode 100644 index 0000000000..530130cd58 --- /dev/null +++ b/contracts/README.md @@ -0,0 +1,51 @@ +# BBG Contracts +------------ + +### 1. 
Before Start + +Create and modify the following files in this directory, the secret key inside the files are dummy ones from truffle dev server: +- development-secret.json +- polygon-secret.json +- bsc-secret.json + +### 2. Prepare the dependencies + +```bash +npm i +# if you want to develope in localhost, try to run npm run devserver separately +# ex: npm run devserver +# it will give you a set of secrets and account addresses +``` + +### 3. Deploy + +Migrate: +```bash +npm run migrate:dev +# npm run migrate:polygon +# npm run migrate:polygon-test +# npm run migrate:bsc +# npm run migrate:bsc-test +``` + +Lint: +```bash +npm run lint +# # fix solidity issue +# npm run lint:sol:fix +# # fix js issue +# npm run lint:js:fix +``` + +Test: +```bash +npm run test +``` + +```bash +truffle run verify ChildMintableERC20 --network polygon +``` + +```bash +truffle run verify ChildMintableERC20@0x3Afe98235d680e8d7A52e1458a59D60f45F935C0 --network polygon +``` diff --git a/contracts/bsc-secret.json b/contracts/bsc-secret.json new file mode 100644 index 0000000000..df858f2130 --- /dev/null +++ b/contracts/bsc-secret.json @@ -0,0 +1,4 @@ +{ + "privateKey": "3899a918953e01bfe218116cdfeccbed579e26275c4a89abcbc70d2cb9e9bbb8", + "etherScanApiKey": "" +} diff --git a/contracts/contracts/Migrations.sol b/contracts/contracts/Migrations.sol new file mode 100644 index 0000000000..3e92ef0f35 --- /dev/null +++ b/contracts/contracts/Migrations.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.4.22 <0.9.0; + + +contract Migrations { + address public owner = msg.sender; + uint public last_completed_migration; + + modifier restricted() { + require( + msg.sender == owner, + "This function is restricted to the contract's owner" + ); + _; + } + + function setCompleted(uint completed) public restricted { + last_completed_migration = completed; + } +} diff --git a/contracts/contracts/Token.sol b/contracts/contracts/Token.sol new file mode 100644 index 0000000000..c95b40bd05 --- /dev/null +++ b/contracts/contracts/Token.sol @@ -0,0 +1,5 @@ +pragma solidity ^0.6.2; + +import "@openzeppelin/contracts/presets/ERC20PresetMinterPauser.sol"; + + diff --git a/contracts/contracts/child/ChildToken/ChildMintableERC20.sol b/contracts/contracts/child/ChildToken/ChildMintableERC20.sol new file mode 100644 index 0000000000..b0f280ea17 --- /dev/null +++ b/contracts/contracts/child/ChildToken/ChildMintableERC20.sol @@ -0,0 +1,79 @@ +pragma solidity 0.6.6; + +import {ERC20} from "@openzeppelin/contracts/token/ERC20/ERC20.sol"; +import {AccessControlMixin} from "../../common/AccessControlMixin.sol"; +import {IChildToken} from "./IChildToken.sol"; +import {NativeMetaTransaction} from "../../common/NativeMetaTransaction.sol"; +import {ContextMixin} from "../../common/ContextMixin.sol"; + + +contract ChildMintableERC20 is + ERC20, + IChildToken, + AccessControlMixin, + NativeMetaTransaction, + ContextMixin +{ + bytes32 public constant DEPOSITOR_ROLE = keccak256("DEPOSITOR_ROLE"); + + constructor( + string memory name_, + string memory symbol_, + uint8 decimals_, + address childChainManager + ) public ERC20(name_, symbol_) { + _setupContractId("ChildMintableERC20"); + _setupDecimals(decimals_); + _setupRole(DEFAULT_ADMIN_ROLE, _msgSender()); + _setupRole(DEPOSITOR_ROLE, childChainManager); + _initializeEIP712(name_); + } + + // This is to support Native meta transactions + // never use msg.sender directly, use _msgSender() instead + function _msgSender() + internal + override + view + returns (address payable sender) + { 
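+        // When this contract calls itself from executeMetaTransaction, msg.sender is the
+        // token contract; ContextMixin then recovers the original signer that was appended
+        // to the calldata.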
+ return ContextMixin.msgSender(); + } + + /** + * @notice called when token is deposited on root chain + * @dev Should be callable only by ChildChainManager + * Should handle deposit by minting the required amount for user + * Make sure minting is done only by this function + * @param user user address for whom deposit is being done + * @param depositData abi encoded amount + */ + function deposit(address user, bytes calldata depositData) + external + override + only(DEPOSITOR_ROLE) + { + uint256 amount = abi.decode(depositData, (uint256)); + _mint(user, amount); + } + + /** + * @notice called when user wants to withdraw tokens back to root chain + * @dev Should burn user's tokens. This transaction will be verified when exiting on root chain + * @param amount amount of tokens to withdraw + */ + function withdraw(uint256 amount) external { + _burn(_msgSender(), amount); + } + + /** + * @notice Example function to handle minting tokens on matic chain + * @dev Minting can be done as per requirement, + * This implementation allows only admin to mint tokens but it can be changed as per requirement + * @param user user for whom tokens are being minted + * @param amount amount of token to mint + */ + function mint(address user, uint256 amount) public only(DEFAULT_ADMIN_ROLE) { + _mint(user, amount); + } +} diff --git a/contracts/contracts/child/ChildToken/IChildToken.sol b/contracts/contracts/child/ChildToken/IChildToken.sol new file mode 100644 index 0000000000..d3a2f39ddd --- /dev/null +++ b/contracts/contracts/child/ChildToken/IChildToken.sol @@ -0,0 +1,5 @@ +pragma solidity 0.6.6; + +interface IChildToken { + function deposit(address user, bytes calldata depositData) external; +} diff --git a/contracts/contracts/common/AccessControlMixin.sol b/contracts/contracts/common/AccessControlMixin.sol new file mode 100644 index 0000000000..29c3e6084f --- /dev/null +++ b/contracts/contracts/common/AccessControlMixin.sol @@ -0,0 +1,19 @@ +pragma solidity 0.6.6; + +import {AccessControl} from "@openzeppelin/contracts/access/AccessControl.sol"; + + +contract AccessControlMixin is AccessControl { + string private _revertMsg; + function _setupContractId(string memory contractId) internal { + _revertMsg = string(abi.encodePacked(contractId, ": INSUFFICIENT_PERMISSIONS")); + } + + modifier only(bytes32 role) { + require( + hasRole(role, _msgSender()), + _revertMsg + ); + _; + } +} diff --git a/contracts/contracts/common/ContextMixin.sol b/contracts/contracts/common/ContextMixin.sol new file mode 100644 index 0000000000..5a3ff323ff --- /dev/null +++ b/contracts/contracts/common/ContextMixin.sol @@ -0,0 +1,25 @@ +pragma solidity 0.6.6; + +abstract contract ContextMixin { + function msgSender() + internal + view + returns (address payable sender) + { + if (msg.sender == address(this)) { + bytes memory array = msg.data; + uint256 index = msg.data.length; + /* solhint-disable no-inline-assembly */ + assembly { + // Load the 32 bytes word from memory with the address on the lower 20 bytes, and mask those. 
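+                // executeMetaTransaction appends the signer's address as the last 20 bytes of
+                // calldata, so reading the trailing 32-byte word and masking the low 20 bytes
+                // yields that address.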
+ sender := and( + mload(add(array, index)), + 0xffffffffffffffffffffffffffffffffffffffff + ) + } + } else { + sender = msg.sender; + } + return sender; + } +} diff --git a/contracts/contracts/common/EIP712Base.sol b/contracts/contracts/common/EIP712Base.sol new file mode 100644 index 0000000000..9e8dafbf73 --- /dev/null +++ b/contracts/contracts/common/EIP712Base.sol @@ -0,0 +1,77 @@ +pragma solidity 0.6.6; + +import {Initializable} from "./Initializable.sol"; + + +contract EIP712Base is Initializable { + struct EIP712Domain { + string name; + string version; + address verifyingContract; + bytes32 salt; + } + + string constant public ERC712_VERSION = "1"; + + bytes32 internal constant EIP712_DOMAIN_TYPEHASH = keccak256( + bytes( + "EIP712Domain(string name,string version,address verifyingContract,bytes32 salt)" + ) + ); + bytes32 internal domainSeperator; + + // supposed to be called once while initializing. + // one of the contractsa that inherits this contract follows proxy pattern + // so it is not possible to do this in a constructor + function _initializeEIP712( + string memory name + ) + internal + initializer + { + _setDomainSeperator(name); + } + + function _setDomainSeperator(string memory name) internal { + domainSeperator = keccak256( + abi.encode( + EIP712_DOMAIN_TYPEHASH, + keccak256(bytes(name)), + keccak256(bytes(ERC712_VERSION)), + address(this), + bytes32(getChainId()) + ) + ); + } + + function getDomainSeperator() public view returns (bytes32) { + return domainSeperator; + } + + function getChainId() public pure returns (uint256) { + uint256 id; + /* solhint-disable no-inline-assembly */ + assembly { + id := chainid() + } + return id; + } + + /** + * Accept message hash and returns hash message in EIP712 compatible form + * So that it can be used to recover signer from signature signed using EIP712 formatted data + * https://eips.ethereum.org/EIPS/eip-712 + * "\\x19" makes the encoding deterministic + * "\\x01" is the version byte to make it compatible to EIP-191 + */ + function toTypedMessageHash(bytes32 messageHash) + internal + view + returns (bytes32) + { + return + keccak256( + abi.encodePacked("\x19\x01", getDomainSeperator(), messageHash) + ); + } +} diff --git a/contracts/contracts/common/Initializable.sol b/contracts/contracts/common/Initializable.sol new file mode 100644 index 0000000000..c840ec78cf --- /dev/null +++ b/contracts/contracts/common/Initializable.sol @@ -0,0 +1,12 @@ +pragma solidity 0.6.6; + + +contract Initializable { + bool public inited = false; + + modifier initializer() { + require(!inited, "already inited"); + _; + inited = true; + } +} diff --git a/contracts/contracts/common/NativeMetaTransaction.sol b/contracts/contracts/common/NativeMetaTransaction.sol new file mode 100644 index 0000000000..0edc889ae4 --- /dev/null +++ b/contracts/contracts/common/NativeMetaTransaction.sol @@ -0,0 +1,106 @@ +pragma solidity 0.6.6; + +import {SafeMath} from "@openzeppelin/contracts/math/SafeMath.sol"; +import {EIP712Base} from "./EIP712Base.sol"; + + +contract NativeMetaTransaction is EIP712Base { + using SafeMath for uint256; + bytes32 private constant META_TRANSACTION_TYPEHASH = keccak256( + bytes( + "MetaTransaction(uint256 nonce,address from,bytes functionSignature)" + ) + ); + event MetaTransactionExecuted( + address userAddress, + address payable relayerAddress, + bytes functionSignature + ); + mapping(address => uint256) public nonces; + + /* + * Meta transaction structure. 
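+     * Carries the user's current nonce, the user's address and the ABI-encoded call
+     * (functionSignature) to execute on their behalf.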
+ * No point of including value field here as if user is doing value transfer then he has the funds to pay for gas + * He should call the desired function directly in that case. + */ + struct MetaTransaction { + uint256 nonce; + address from; + bytes functionSignature; + } + + function executeMetaTransaction( + address userAddress, + bytes memory functionSignature, + bytes32 sigR, + bytes32 sigS, + uint8 sigV + ) public payable returns (bytes memory) { + MetaTransaction memory metaTx = MetaTransaction({ + nonce: nonces[userAddress], + from: userAddress, + functionSignature: functionSignature + }); + + require( + verify(userAddress, metaTx, sigR, sigS, sigV), + "Signer and signature do not match" + ); + + // increase nonce for user (to avoid re-use) + nonces[userAddress] = nonces[userAddress].add(1); + + emit MetaTransactionExecuted( + userAddress, + msg.sender, + functionSignature + ); + + // Append userAddress and relayer address at the end to extract it from calling context + /* solhint-disable avoid-low-level-calls */ + (bool success, bytes memory returnData) = address(this).call( + abi.encodePacked(functionSignature, userAddress) + ); + require(success, "Function call not successful"); + + return returnData; + } + + function hashMetaTransaction(MetaTransaction memory metaTx) + internal + pure + returns (bytes32) + { + return + keccak256( + abi.encode( + META_TRANSACTION_TYPEHASH, + metaTx.nonce, + metaTx.from, + keccak256(metaTx.functionSignature) + ) + ); + } + + function getNonce(address user) public view returns (uint256 nonce) { + nonce = nonces[user]; + } + + function verify( + address signer, + MetaTransaction memory metaTx, + bytes32 sigR, + bytes32 sigS, + uint8 sigV + ) internal view returns (bool) { + require(signer != address(0), "NativeMetaTransaction: INVALID_SIGNER"); + return + signer == + ecrecover( + toTypedMessageHash(hashMetaTransaction(metaTx)), + sigV, + sigR, + sigS + ); + } +} diff --git a/contracts/contracts/common/Proxy/IERCProxy.sol b/contracts/contracts/common/Proxy/IERCProxy.sol new file mode 100644 index 0000000000..f87f220435 --- /dev/null +++ b/contracts/contracts/common/Proxy/IERCProxy.sol @@ -0,0 +1,7 @@ +pragma solidity 0.6.6; + +interface IERCProxy { + function proxyType() external pure returns (uint256 proxyTypeId); + + function implementation() external view returns (address codeAddr); +} diff --git a/contracts/contracts/common/Proxy/Proxy.sol b/contracts/contracts/common/Proxy/Proxy.sol new file mode 100644 index 0000000000..8ca3233729 --- /dev/null +++ b/contracts/contracts/common/Proxy/Proxy.sol @@ -0,0 +1,39 @@ +pragma solidity 0.6.6; +import {IERCProxy} from "./IERCProxy.sol"; + +abstract contract Proxy is IERCProxy { + function delegatedFwd(address _dst, bytes memory _calldata) internal { + // solium-disable-next-line security/no-inline-assembly + assembly { + let result := delegatecall( + sub(gas(), 10000), + _dst, + add(_calldata, 0x20), + mload(_calldata), + 0, + 0 + ) + let size := returndatasize() + + let ptr := mload(0x40) + returndatacopy(ptr, 0, size) + + // revert instead of invalid() bc if the underlying call failed with invalid() it already wasted gas. 
+ // if the call returned error data, forward it + switch result + case 0 { + revert(ptr, size) + } + default { + return(ptr, size) + } + } + } + + function proxyType() external virtual override pure returns (uint256 proxyTypeId) { + // Upgradeable proxy + proxyTypeId = 2; + } + + function implementation() external virtual override view returns (address); +} diff --git a/contracts/contracts/common/Proxy/UpgradableProxy.sol b/contracts/contracts/common/Proxy/UpgradableProxy.sol new file mode 100644 index 0000000000..c88142a6bb --- /dev/null +++ b/contracts/contracts/common/Proxy/UpgradableProxy.sol @@ -0,0 +1,103 @@ +pragma solidity 0.6.6; + +import {Proxy} from "./Proxy.sol"; + +contract UpgradableProxy is Proxy { + event ProxyUpdated(address indexed _new, address indexed _old); + event ProxyOwnerUpdate(address _new, address _old); + + bytes32 constant IMPLEMENTATION_SLOT = keccak256("matic.network.proxy.implementation"); + bytes32 constant OWNER_SLOT = keccak256("matic.network.proxy.owner"); + + constructor(address _proxyTo) public { + setProxyOwner(msg.sender); + setImplementation(_proxyTo); + } + + fallback() external payable { + delegatedFwd(loadImplementation(), msg.data); + } + + receive() external payable { + delegatedFwd(loadImplementation(), msg.data); + } + + modifier onlyProxyOwner() { + require(loadProxyOwner() == msg.sender, "NOT_OWNER"); + _; + } + + function proxyOwner() external view returns(address) { + return loadProxyOwner(); + } + + function loadProxyOwner() internal view returns(address) { + address _owner; + bytes32 position = OWNER_SLOT; + assembly { + _owner := sload(position) + } + return _owner; + } + + function implementation() external override view returns (address) { + return loadImplementation(); + } + + function loadImplementation() internal view returns(address) { + address _impl; + bytes32 position = IMPLEMENTATION_SLOT; + assembly { + _impl := sload(position) + } + return _impl; + } + + function transferProxyOwnership(address newOwner) public onlyProxyOwner { + require(newOwner != address(0), "ZERO_ADDRESS"); + emit ProxyOwnerUpdate(newOwner, loadProxyOwner()); + setProxyOwner(newOwner); + } + + function setProxyOwner(address newOwner) private { + bytes32 position = OWNER_SLOT; + assembly { + sstore(position, newOwner) + } + } + + function updateImplementation(address _newProxyTo) public onlyProxyOwner { + require(_newProxyTo != address(0x0), "INVALID_PROXY_ADDRESS"); + require(isContract(_newProxyTo), "DESTINATION_ADDRESS_IS_NOT_A_CONTRACT"); + + emit ProxyUpdated(_newProxyTo, loadImplementation()); + + setImplementation(_newProxyTo); + } + + function updateAndCall(address _newProxyTo, bytes memory data) payable public onlyProxyOwner { + updateImplementation(_newProxyTo); + + (bool success, bytes memory returnData) = address(this).call{value: msg.value}(data); + require(success, string(returnData)); + } + + function setImplementation(address _newProxyTo) private { + bytes32 position = IMPLEMENTATION_SLOT; + assembly { + sstore(position, _newProxyTo) + } + } + + function isContract(address _target) internal view returns (bool) { + if (_target == address(0)) { + return false; + } + + uint256 size; + assembly { + size := extcodesize(_target) + } + return size > 0; + } +} diff --git a/contracts/development-secret.json b/contracts/development-secret.json new file mode 100644 index 0000000000..df858f2130 --- /dev/null +++ b/contracts/development-secret.json @@ -0,0 +1,4 @@ +{ + "privateKey": "3899a918953e01bfe218116cdfeccbed579e26275c4a89abcbc70d2cb9e9bbb8", + 
"etherScanApiKey": "" +} diff --git a/contracts/flat/BBG.sol b/contracts/flat/BBG.sol new file mode 100644 index 0000000000..367188d28f --- /dev/null +++ b/contracts/flat/BBG.sol @@ -0,0 +1,1433 @@ +// File: @openzeppelin/contracts/utils/EnumerableSet.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.0; + +/** + * @dev Library for managing + * https://en.wikipedia.org/wiki/Set_(abstract_data_type)[sets] of primitive + * types. + * + * Sets have the following properties: + * + * - Elements are added, removed, and checked for existence in constant time + * (O(1)). + * - Elements are enumerated in O(n). No guarantees are made on the ordering. + * + * ``` + * contract Example { + * // Add the library methods + * using EnumerableSet for EnumerableSet.AddressSet; + * + * // Declare a set state variable + * EnumerableSet.AddressSet private mySet; + * } + * ``` + * + * As of v3.0.0, only sets of type `address` (`AddressSet`) and `uint256` + * (`UintSet`) are supported. + */ +library EnumerableSet { + // To implement this library for multiple types with as little code + // repetition as possible, we write it in terms of a generic Set type with + // bytes32 values. + // The Set implementation uses private functions, and user-facing + // implementations (such as AddressSet) are just wrappers around the + // underlying Set. + // This means that we can only create new EnumerableSets for types that fit + // in bytes32. + + struct Set { + // Storage of set values + bytes32[] _values; + + // Position of the value in the `values` array, plus 1 because index 0 + // means a value is not in the set. + mapping (bytes32 => uint256) _indexes; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function _add(Set storage set, bytes32 value) private returns (bool) { + if (!_contains(set, value)) { + set._values.push(value); + // The value is stored at length-1, but we add 1 to all indexes + // and use 0 as a sentinel value + set._indexes[value] = set._values.length; + return true; + } else { + return false; + } + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. + */ + function _remove(Set storage set, bytes32 value) private returns (bool) { + // We read and store the value's index to prevent multiple reads from the same storage slot + uint256 valueIndex = set._indexes[value]; + + if (valueIndex != 0) { // Equivalent to contains(set, value) + // To delete an element from the _values array in O(1), we swap the element to delete with the last one in + // the array, and then remove the last element (sometimes called as 'swap and pop'). + // This modifies the order of the array, as noted in {at}. + + uint256 toDeleteIndex = valueIndex - 1; + uint256 lastIndex = set._values.length - 1; + + // When the value to delete is the last one, the swap operation is unnecessary. However, since this occurs + // so rarely, we still do the swap anyway to avoid the gas cost of adding an 'if' statement. 
+ + bytes32 lastvalue = set._values[lastIndex]; + + // Move the last value to the index where the value to delete is + set._values[toDeleteIndex] = lastvalue; + // Update the index for the moved value + set._indexes[lastvalue] = toDeleteIndex + 1; // All indexes are 1-based + + // Delete the slot where the moved value was stored + set._values.pop(); + + // Delete the index for the deleted slot + delete set._indexes[value]; + + return true; + } else { + return false; + } + } + + /** + * @dev Returns true if the value is in the set. O(1). + */ + function _contains(Set storage set, bytes32 value) private view returns (bool) { + return set._indexes[value] != 0; + } + + /** + * @dev Returns the number of values on the set. O(1). + */ + function _length(Set storage set) private view returns (uint256) { + return set._values.length; + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function _at(Set storage set, uint256 index) private view returns (bytes32) { + require(set._values.length > index, "EnumerableSet: index out of bounds"); + return set._values[index]; + } + + // AddressSet + + struct AddressSet { + Set _inner; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function add(AddressSet storage set, address value) internal returns (bool) { + return _add(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. + */ + function remove(AddressSet storage set, address value) internal returns (bool) { + return _remove(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Returns true if the value is in the set. O(1). + */ + function contains(AddressSet storage set, address value) internal view returns (bool) { + return _contains(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Returns the number of values in the set. O(1). + */ + function length(AddressSet storage set) internal view returns (uint256) { + return _length(set._inner); + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function at(AddressSet storage set, uint256 index) internal view returns (address) { + return address(uint256(_at(set._inner, index))); + } + + + // UintSet + + struct UintSet { + Set _inner; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function add(UintSet storage set, uint256 value) internal returns (bool) { + return _add(set._inner, bytes32(value)); + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. + */ + function remove(UintSet storage set, uint256 value) internal returns (bool) { + return _remove(set._inner, bytes32(value)); + } + + /** + * @dev Returns true if the value is in the set. O(1). 
+ */ + function contains(UintSet storage set, uint256 value) internal view returns (bool) { + return _contains(set._inner, bytes32(value)); + } + + /** + * @dev Returns the number of values on the set. O(1). + */ + function length(UintSet storage set) internal view returns (uint256) { + return _length(set._inner); + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function at(UintSet storage set, uint256 index) internal view returns (uint256) { + return uint256(_at(set._inner, index)); + } +} + +// File: @openzeppelin/contracts/utils/Address.sol + + +pragma solidity ^0.6.2; + +/** + * @dev Collection of functions related to the address type + */ +library Address { + /** + * @dev Returns true if `account` is a contract. + * + * [IMPORTANT] + * ==== + * It is unsafe to assume that an address for which this function returns + * false is an externally-owned account (EOA) and not a contract. + * + * Among others, `isContract` will return false for the following + * types of addresses: + * + * - an externally-owned account + * - a contract in construction + * - an address where a contract will be created + * - an address where a contract lived, but was destroyed + * ==== + */ + function isContract(address account) internal view returns (bool) { + // This method relies in extcodesize, which returns 0 for contracts in + // construction, since the code is only stored at the end of the + // constructor execution. + + uint256 size; + // solhint-disable-next-line no-inline-assembly + assembly { size := extcodesize(account) } + return size > 0; + } + + /** + * @dev Replacement for Solidity's `transfer`: sends `amount` wei to + * `recipient`, forwarding all available gas and reverting on errors. + * + * https://eips.ethereum.org/EIPS/eip-1884[EIP1884] increases the gas cost + * of certain opcodes, possibly making contracts go over the 2300 gas limit + * imposed by `transfer`, making them unable to receive funds via + * `transfer`. {sendValue} removes this limitation. + * + * https://diligence.consensys.net/posts/2019/09/stop-using-soliditys-transfer-now/[Learn more]. + * + * IMPORTANT: because control is transferred to `recipient`, care must be + * taken to not create reentrancy vulnerabilities. Consider using + * {ReentrancyGuard} or the + * https://solidity.readthedocs.io/en/v0.5.11/security-considerations.html#use-the-checks-effects-interactions-pattern[checks-effects-interactions pattern]. + */ + function sendValue(address payable recipient, uint256 amount) internal { + require(address(this).balance >= amount, "Address: insufficient balance"); + + // solhint-disable-next-line avoid-low-level-calls, avoid-call-value + (bool success, ) = recipient.call{ value: amount }(""); + require(success, "Address: unable to send value, recipient may have reverted"); + } + + /** + * @dev Performs a Solidity function call using a low level `call`. A + * plain`call` is an unsafe replacement for a function call: use this + * function instead. + * + * If `target` reverts with a revert reason, it is bubbled up by this + * function (like regular Solidity function calls). + * + * Returns the raw returned data. 
To convert to the expected return value, + * use https://solidity.readthedocs.io/en/latest/units-and-global-variables.html?highlight=abi.decode#abi-encoding-and-decoding-functions[`abi.decode`]. + * + * Requirements: + * + * - `target` must be a contract. + * - calling `target` with `data` must not revert. + * + * _Available since v3.1._ + */ + function functionCall(address target, bytes memory data) internal returns (bytes memory) { + return functionCall(target, data, "Address: low-level call failed"); + } + + /** + * @dev Same as {xref-Address-functionCall-address-bytes-}[`functionCall`], but with + * `errorMessage` as a fallback revert reason when `target` reverts. + * + * _Available since v3.1._ + */ + function functionCall(address target, bytes memory data, string memory errorMessage) internal returns (bytes memory) { + return _functionCallWithValue(target, data, 0, errorMessage); + } + + /** + * @dev Same as {xref-Address-functionCall-address-bytes-}[`functionCall`], + * but also transferring `value` wei to `target`. + * + * Requirements: + * + * - the calling contract must have an ETH balance of at least `value`. + * - the called Solidity function must be `payable`. + * + * _Available since v3.1._ + */ + function functionCallWithValue(address target, bytes memory data, uint256 value) internal returns (bytes memory) { + return functionCallWithValue(target, data, value, "Address: low-level call with value failed"); + } + + /** + * @dev Same as {xref-Address-functionCallWithValue-address-bytes-uint256-}[`functionCallWithValue`], but + * with `errorMessage` as a fallback revert reason when `target` reverts. + * + * _Available since v3.1._ + */ + function functionCallWithValue(address target, bytes memory data, uint256 value, string memory errorMessage) internal returns (bytes memory) { + require(address(this).balance >= value, "Address: insufficient balance for call"); + return _functionCallWithValue(target, data, value, errorMessage); + } + + function _functionCallWithValue(address target, bytes memory data, uint256 weiValue, string memory errorMessage) private returns (bytes memory) { + require(isContract(target), "Address: call to non-contract"); + + // solhint-disable-next-line avoid-low-level-calls + (bool success, bytes memory returndata) = target.call{ value: weiValue }(data); + if (success) { + return returndata; + } else { + // Look for revert reason and bubble it up if present + if (returndata.length > 0) { + // The easiest way to bubble the revert reason is using memory via assembly + + // solhint-disable-next-line no-inline-assembly + assembly { + let returndata_size := mload(returndata) + revert(add(32, returndata), returndata_size) + } + } else { + revert(errorMessage); + } + } + } +} + +// File: @openzeppelin/contracts/GSN/Context.sol + + +pragma solidity ^0.6.0; + +/* + * @dev Provides information about the current execution context, including the + * sender of the transaction and its data. While these are generally available + * via msg.sender and msg.data, they should not be accessed in such a direct + * manner, since when dealing with GSN meta-transactions the account sending and + * paying for execution may not be the actual sender (as far as an application + * is concerned). + * + * This contract is only required for intermediate, library-like contracts. 
+ */ +abstract contract Context { + function _msgSender() internal view virtual returns (address payable) { + return msg.sender; + } + + function _msgData() internal view virtual returns (bytes memory) { + this; // silence state mutability warning without generating bytecode - see https://github.com/ethereum/solidity/issues/2691 + return msg.data; + } +} + +// File: @openzeppelin/contracts/access/AccessControl.sol + + +pragma solidity ^0.6.0; + + + + +/** + * @dev Contract module that allows children to implement role-based access + * control mechanisms. + * + * Roles are referred to by their `bytes32` identifier. These should be exposed + * in the external API and be unique. The best way to achieve this is by + * using `public constant` hash digests: + * + * ``` + * bytes32 public constant MY_ROLE = keccak256("MY_ROLE"); + * ``` + * + * Roles can be used to represent a set of permissions. To restrict access to a + * function call, use {hasRole}: + * + * ``` + * function foo() public { + * require(hasRole(MY_ROLE, msg.sender)); + * ... + * } + * ``` + * + * Roles can be granted and revoked dynamically via the {grantRole} and + * {revokeRole} functions. Each role has an associated admin role, and only + * accounts that have a role's admin role can call {grantRole} and {revokeRole}. + * + * By default, the admin role for all roles is `DEFAULT_ADMIN_ROLE`, which means + * that only accounts with this role will be able to grant or revoke other + * roles. More complex role relationships can be created by using + * {_setRoleAdmin}. + * + * WARNING: The `DEFAULT_ADMIN_ROLE` is also its own admin: it has permission to + * grant and revoke this role. Extra precautions should be taken to secure + * accounts that have been granted it. + */ +abstract contract AccessControl is Context { + using EnumerableSet for EnumerableSet.AddressSet; + using Address for address; + + struct RoleData { + EnumerableSet.AddressSet members; + bytes32 adminRole; + } + + mapping (bytes32 => RoleData) private _roles; + + bytes32 public constant DEFAULT_ADMIN_ROLE = 0x00; + + /** + * @dev Emitted when `newAdminRole` is set as ``role``'s admin role, replacing `previousAdminRole` + * + * `DEFAULT_ADMIN_ROLE` is the starting admin for all roles, despite + * {RoleAdminChanged} not being emitted signaling this. + * + * _Available since v3.1._ + */ + event RoleAdminChanged(bytes32 indexed role, bytes32 indexed previousAdminRole, bytes32 indexed newAdminRole); + + /** + * @dev Emitted when `account` is granted `role`. + * + * `sender` is the account that originated the contract call, an admin role + * bearer except when using {_setupRole}. + */ + event RoleGranted(bytes32 indexed role, address indexed account, address indexed sender); + + /** + * @dev Emitted when `account` is revoked `role`. + * + * `sender` is the account that originated the contract call: + * - if using `revokeRole`, it is the admin role bearer + * - if using `renounceRole`, it is the role bearer (i.e. `account`) + */ + event RoleRevoked(bytes32 indexed role, address indexed account, address indexed sender); + + /** + * @dev Returns `true` if `account` has been granted `role`. + */ + function hasRole(bytes32 role, address account) public view returns (bool) { + return _roles[role].members.contains(account); + } + + /** + * @dev Returns the number of accounts that have `role`. Can be used + * together with {getRoleMember} to enumerate all bearers of a role. 
+ */ + function getRoleMemberCount(bytes32 role) public view returns (uint256) { + return _roles[role].members.length(); + } + + /** + * @dev Returns one of the accounts that have `role`. `index` must be a + * value between 0 and {getRoleMemberCount}, non-inclusive. + * + * Role bearers are not sorted in any particular way, and their ordering may + * change at any point. + * + * WARNING: When using {getRoleMember} and {getRoleMemberCount}, make sure + * you perform all queries on the same block. See the following + * https://forum.openzeppelin.com/t/iterating-over-elements-on-enumerableset-in-openzeppelin-contracts/2296[forum post] + * for more information. + */ + function getRoleMember(bytes32 role, uint256 index) public view returns (address) { + return _roles[role].members.at(index); + } + + /** + * @dev Returns the admin role that controls `role`. See {grantRole} and + * {revokeRole}. + * + * To change a role's admin, use {_setRoleAdmin}. + */ + function getRoleAdmin(bytes32 role) public view returns (bytes32) { + return _roles[role].adminRole; + } + + /** + * @dev Grants `role` to `account`. + * + * If `account` had not been already granted `role`, emits a {RoleGranted} + * event. + * + * Requirements: + * + * - the caller must have ``role``'s admin role. + */ + function grantRole(bytes32 role, address account) public virtual { + require(hasRole(_roles[role].adminRole, _msgSender()), "AccessControl: sender must be an admin to grant"); + + _grantRole(role, account); + } + + /** + * @dev Revokes `role` from `account`. + * + * If `account` had been granted `role`, emits a {RoleRevoked} event. + * + * Requirements: + * + * - the caller must have ``role``'s admin role. + */ + function revokeRole(bytes32 role, address account) public virtual { + require(hasRole(_roles[role].adminRole, _msgSender()), "AccessControl: sender must be an admin to revoke"); + + _revokeRole(role, account); + } + + /** + * @dev Revokes `role` from the calling account. + * + * Roles are often managed via {grantRole} and {revokeRole}: this function's + * purpose is to provide a mechanism for accounts to lose their privileges + * if they are compromised (such as when a trusted device is misplaced). + * + * If the calling account had been granted `role`, emits a {RoleRevoked} + * event. + * + * Requirements: + * + * - the caller must be `account`. + */ + function renounceRole(bytes32 role, address account) public virtual { + require(account == _msgSender(), "AccessControl: can only renounce roles for self"); + + _revokeRole(role, account); + } + + /** + * @dev Grants `role` to `account`. + * + * If `account` had not been already granted `role`, emits a {RoleGranted} + * event. Note that unlike {grantRole}, this function doesn't perform any + * checks on the calling account. + * + * [WARNING] + * ==== + * This function should only be called from the constructor when setting + * up the initial roles for the system. + * + * Using this function in any other way is effectively circumventing the admin + * system imposed by {AccessControl}. + * ==== + */ + function _setupRole(bytes32 role, address account) internal virtual { + _grantRole(role, account); + } + + /** + * @dev Sets `adminRole` as ``role``'s admin role. + * + * Emits a {RoleAdminChanged} event. 
+ */ + function _setRoleAdmin(bytes32 role, bytes32 adminRole) internal virtual { + emit RoleAdminChanged(role, _roles[role].adminRole, adminRole); + _roles[role].adminRole = adminRole; + } + + function _grantRole(bytes32 role, address account) private { + if (_roles[role].members.add(account)) { + emit RoleGranted(role, account, _msgSender()); + } + } + + function _revokeRole(bytes32 role, address account) private { + if (_roles[role].members.remove(account)) { + emit RoleRevoked(role, account, _msgSender()); + } + } +} + +// File: @openzeppelin/contracts/token/ERC20/IERC20.sol + + +pragma solidity ^0.6.0; + +/** + * @dev Interface of the ERC20 standard as defined in the EIP. + */ +interface IERC20 { + /** + * @dev Returns the amount of tokens in existence. + */ + function totalSupply() external view returns (uint256); + + /** + * @dev Returns the amount of tokens owned by `account`. + */ + function balanceOf(address account) external view returns (uint256); + + /** + * @dev Moves `amount` tokens from the caller's account to `recipient`. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transfer(address recipient, uint256 amount) external returns (bool); + + /** + * @dev Returns the remaining number of tokens that `spender` will be + * allowed to spend on behalf of `owner` through {transferFrom}. This is + * zero by default. + * + * This value changes when {approve} or {transferFrom} are called. + */ + function allowance(address owner, address spender) external view returns (uint256); + + /** + * @dev Sets `amount` as the allowance of `spender` over the caller's tokens. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * IMPORTANT: Beware that changing an allowance with this method brings the risk + * that someone may use both the old and the new allowance by unfortunate + * transaction ordering. One possible solution to mitigate this race + * condition is to first reduce the spender's allowance to 0 and set the + * desired value afterwards: + * https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + * + * Emits an {Approval} event. + */ + function approve(address spender, uint256 amount) external returns (bool); + + /** + * @dev Moves `amount` tokens from `sender` to `recipient` using the + * allowance mechanism. `amount` is then deducted from the caller's + * allowance. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transferFrom(address sender, address recipient, uint256 amount) external returns (bool); + + /** + * @dev Emitted when `value` tokens are moved from one account (`from`) to + * another (`to`). + * + * Note that `value` may be zero. + */ + event Transfer(address indexed from, address indexed to, uint256 value); + + /** + * @dev Emitted when the allowance of a `spender` for an `owner` is set by + * a call to {approve}. `value` is the new allowance. + */ + event Approval(address indexed owner, address indexed spender, uint256 value); +} + +// File: @openzeppelin/contracts/math/SafeMath.sol + + +pragma solidity ^0.6.0; + +/** + * @dev Wrappers over Solidity's arithmetic operations with added overflow + * checks. + * + * Arithmetic operations in Solidity wrap on overflow. This can easily result + * in bugs, because programmers usually assume that an overflow raises an + * error, which is the standard behavior in high level programming languages. 
+ * `SafeMath` restores this intuition by reverting the transaction when an + * operation overflows. + * + * Using this library instead of the unchecked operations eliminates an entire + * class of bugs, so it's recommended to use it always. + */ +library SafeMath { + /** + * @dev Returns the addition of two unsigned integers, reverting on + * overflow. + * + * Counterpart to Solidity's `+` operator. + * + * Requirements: + * + * - Addition cannot overflow. + */ + function add(uint256 a, uint256 b) internal pure returns (uint256) { + uint256 c = a + b; + require(c >= a, "SafeMath: addition overflow"); + + return c; + } + + /** + * @dev Returns the subtraction of two unsigned integers, reverting on + * overflow (when the result is negative). + * + * Counterpart to Solidity's `-` operator. + * + * Requirements: + * + * - Subtraction cannot overflow. + */ + function sub(uint256 a, uint256 b) internal pure returns (uint256) { + return sub(a, b, "SafeMath: subtraction overflow"); + } + + /** + * @dev Returns the subtraction of two unsigned integers, reverting with custom message on + * overflow (when the result is negative). + * + * Counterpart to Solidity's `-` operator. + * + * Requirements: + * + * - Subtraction cannot overflow. + */ + function sub(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b <= a, errorMessage); + uint256 c = a - b; + + return c; + } + + /** + * @dev Returns the multiplication of two unsigned integers, reverting on + * overflow. + * + * Counterpart to Solidity's `*` operator. + * + * Requirements: + * + * - Multiplication cannot overflow. + */ + function mul(uint256 a, uint256 b) internal pure returns (uint256) { + // Gas optimization: this is cheaper than requiring 'a' not being zero, but the + // benefit is lost if 'b' is also tested. + // See: https://github.com/OpenZeppelin/openzeppelin-contracts/pull/522 + if (a == 0) { + return 0; + } + + uint256 c = a * b; + require(c / a == b, "SafeMath: multiplication overflow"); + + return c; + } + + /** + * @dev Returns the integer division of two unsigned integers. Reverts on + * division by zero. The result is rounded towards zero. + * + * Counterpart to Solidity's `/` operator. Note: this function uses a + * `revert` opcode (which leaves remaining gas untouched) while Solidity + * uses an invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function div(uint256 a, uint256 b) internal pure returns (uint256) { + return div(a, b, "SafeMath: division by zero"); + } + + /** + * @dev Returns the integer division of two unsigned integers. Reverts with custom message on + * division by zero. The result is rounded towards zero. + * + * Counterpart to Solidity's `/` operator. Note: this function uses a + * `revert` opcode (which leaves remaining gas untouched) while Solidity + * uses an invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function div(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b > 0, errorMessage); + uint256 c = a / b; + // assert(a == b * c + a % b); // There is no case in which this doesn't hold + + return c; + } + + /** + * @dev Returns the remainder of dividing two unsigned integers. (unsigned integer modulo), + * Reverts when dividing by zero. + * + * Counterpart to Solidity's `%` operator. 
This function uses a `revert` + * opcode (which leaves remaining gas untouched) while Solidity uses an + * invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function mod(uint256 a, uint256 b) internal pure returns (uint256) { + return mod(a, b, "SafeMath: modulo by zero"); + } + + /** + * @dev Returns the remainder of dividing two unsigned integers. (unsigned integer modulo), + * Reverts with custom message when dividing by zero. + * + * Counterpart to Solidity's `%` operator. This function uses a `revert` + * opcode (which leaves remaining gas untouched) while Solidity uses an + * invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function mod(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b != 0, errorMessage); + return a % b; + } +} + +// File: @openzeppelin/contracts/token/ERC20/ERC20.sol + + +pragma solidity ^0.6.0; + + + + + +/** + * @dev Implementation of the {IERC20} interface. + * + * This implementation is agnostic to the way tokens are created. This means + * that a supply mechanism has to be added in a derived contract using {_mint}. + * For a generic mechanism see {ERC20PresetMinterPauser}. + * + * TIP: For a detailed writeup see our guide + * https://forum.zeppelin.solutions/t/how-to-implement-erc20-supply-mechanisms/226[How + * to implement supply mechanisms]. + * + * We have followed general OpenZeppelin guidelines: functions revert instead + * of returning `false` on failure. This behavior is nonetheless conventional + * and does not conflict with the expectations of ERC20 applications. + * + * Additionally, an {Approval} event is emitted on calls to {transferFrom}. + * This allows applications to reconstruct the allowance for all accounts just + * by listening to said events. Other implementations of the EIP may not emit + * these events, as it isn't required by the specification. + * + * Finally, the non-standard {decreaseAllowance} and {increaseAllowance} + * functions have been added to mitigate the well-known issues around setting + * allowances. See {IERC20-approve}. + */ +contract ERC20 is Context, IERC20 { + using SafeMath for uint256; + using Address for address; + + mapping (address => uint256) private _balances; + + mapping (address => mapping (address => uint256)) private _allowances; + + uint256 private _totalSupply; + + string private _name; + string private _symbol; + uint8 private _decimals; + + /** + * @dev Sets the values for {name} and {symbol}, initializes {decimals} with + * a default value of 18. + * + * To select a different value for {decimals}, use {_setupDecimals}. + * + * All three of these values are immutable: they can only be set once during + * construction. + */ + constructor (string memory name, string memory symbol) public { + _name = name; + _symbol = symbol; + _decimals = 18; + } + + /** + * @dev Returns the name of the token. + */ + function name() public view returns (string memory) { + return _name; + } + + /** + * @dev Returns the symbol of the token, usually a shorter version of the + * name. + */ + function symbol() public view returns (string memory) { + return _symbol; + } + + /** + * @dev Returns the number of decimals used to get its user representation. + * For example, if `decimals` equals `2`, a balance of `505` tokens should + * be displayed to a user as `5,05` (`505 / 10 ** 2`). 
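+ * (Editor's note, illustrative example not taken from the upstream OpenZeppelin
+ * source: with the default of 18 decimals, one whole token corresponds to
+ * `10**18` base units, so a front end typically renders
+ * `balanceOf(account) / 10**decimals()`.)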
+ * + * Tokens usually opt for a value of 18, imitating the relationship between + * Ether and Wei. This is the value {ERC20} uses, unless {_setupDecimals} is + * called. + * + * NOTE: This information is only used for _display_ purposes: it in + * no way affects any of the arithmetic of the contract, including + * {IERC20-balanceOf} and {IERC20-transfer}. + */ + function decimals() public view returns (uint8) { + return _decimals; + } + + /** + * @dev See {IERC20-totalSupply}. + */ + function totalSupply() public view override returns (uint256) { + return _totalSupply; + } + + /** + * @dev See {IERC20-balanceOf}. + */ + function balanceOf(address account) public view override returns (uint256) { + return _balances[account]; + } + + /** + * @dev See {IERC20-transfer}. + * + * Requirements: + * + * - `recipient` cannot be the zero address. + * - the caller must have a balance of at least `amount`. + */ + function transfer(address recipient, uint256 amount) public virtual override returns (bool) { + _transfer(_msgSender(), recipient, amount); + return true; + } + + /** + * @dev See {IERC20-allowance}. + */ + function allowance(address owner, address spender) public view virtual override returns (uint256) { + return _allowances[owner][spender]; + } + + /** + * @dev See {IERC20-approve}. + * + * Requirements: + * + * - `spender` cannot be the zero address. + */ + function approve(address spender, uint256 amount) public virtual override returns (bool) { + _approve(_msgSender(), spender, amount); + return true; + } + + /** + * @dev See {IERC20-transferFrom}. + * + * Emits an {Approval} event indicating the updated allowance. This is not + * required by the EIP. See the note at the beginning of {ERC20}; + * + * Requirements: + * - `sender` and `recipient` cannot be the zero address. + * - `sender` must have a balance of at least `amount`. + * - the caller must have allowance for ``sender``'s tokens of at least + * `amount`. + */ + function transferFrom(address sender, address recipient, uint256 amount) public virtual override returns (bool) { + _transfer(sender, recipient, amount); + _approve(sender, _msgSender(), _allowances[sender][_msgSender()].sub(amount, "ERC20: transfer amount exceeds allowance")); + return true; + } + + /** + * @dev Atomically increases the allowance granted to `spender` by the caller. + * + * This is an alternative to {approve} that can be used as a mitigation for + * problems described in {IERC20-approve}. + * + * Emits an {Approval} event indicating the updated allowance. + * + * Requirements: + * + * - `spender` cannot be the zero address. + */ + function increaseAllowance(address spender, uint256 addedValue) public virtual returns (bool) { + _approve(_msgSender(), spender, _allowances[_msgSender()][spender].add(addedValue)); + return true; + } + + /** + * @dev Atomically decreases the allowance granted to `spender` by the caller. + * + * This is an alternative to {approve} that can be used as a mitigation for + * problems described in {IERC20-approve}. + * + * Emits an {Approval} event indicating the updated allowance. + * + * Requirements: + * + * - `spender` cannot be the zero address. + * - `spender` must have allowance for the caller of at least + * `subtractedValue`. 
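+ * (Editor's note, illustrative usage not taken from the upstream source: rather
+ * than re-calling `approve`, a holder can adjust an existing allowance with
+ * `token.increaseAllowance(spender, 50)` and later
+ * `token.decreaseAllowance(spender, 20)`, which sidesteps the approve race
+ * described in {IERC20-approve}. `token` and the amounts are hypothetical.)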
+ */ + function decreaseAllowance(address spender, uint256 subtractedValue) public virtual returns (bool) { + _approve(_msgSender(), spender, _allowances[_msgSender()][spender].sub(subtractedValue, "ERC20: decreased allowance below zero")); + return true; + } + + /** + * @dev Moves tokens `amount` from `sender` to `recipient`. + * + * This is internal function is equivalent to {transfer}, and can be used to + * e.g. implement automatic token fees, slashing mechanisms, etc. + * + * Emits a {Transfer} event. + * + * Requirements: + * + * - `sender` cannot be the zero address. + * - `recipient` cannot be the zero address. + * - `sender` must have a balance of at least `amount`. + */ + function _transfer(address sender, address recipient, uint256 amount) internal virtual { + require(sender != address(0), "ERC20: transfer from the zero address"); + require(recipient != address(0), "ERC20: transfer to the zero address"); + + _beforeTokenTransfer(sender, recipient, amount); + + _balances[sender] = _balances[sender].sub(amount, "ERC20: transfer amount exceeds balance"); + _balances[recipient] = _balances[recipient].add(amount); + emit Transfer(sender, recipient, amount); + } + + /** @dev Creates `amount` tokens and assigns them to `account`, increasing + * the total supply. + * + * Emits a {Transfer} event with `from` set to the zero address. + * + * Requirements + * + * - `to` cannot be the zero address. + */ + function _mint(address account, uint256 amount) internal virtual { + require(account != address(0), "ERC20: mint to the zero address"); + + _beforeTokenTransfer(address(0), account, amount); + + _totalSupply = _totalSupply.add(amount); + _balances[account] = _balances[account].add(amount); + emit Transfer(address(0), account, amount); + } + + /** + * @dev Destroys `amount` tokens from `account`, reducing the + * total supply. + * + * Emits a {Transfer} event with `to` set to the zero address. + * + * Requirements + * + * - `account` cannot be the zero address. + * - `account` must have at least `amount` tokens. + */ + function _burn(address account, uint256 amount) internal virtual { + require(account != address(0), "ERC20: burn from the zero address"); + + _beforeTokenTransfer(account, address(0), amount); + + _balances[account] = _balances[account].sub(amount, "ERC20: burn amount exceeds balance"); + _totalSupply = _totalSupply.sub(amount); + emit Transfer(account, address(0), amount); + } + + /** + * @dev Sets `amount` as the allowance of `spender` over the `owner` s tokens. + * + * This internal function is equivalent to `approve`, and can be used to + * e.g. set automatic allowances for certain subsystems, etc. + * + * Emits an {Approval} event. + * + * Requirements: + * + * - `owner` cannot be the zero address. + * - `spender` cannot be the zero address. + */ + function _approve(address owner, address spender, uint256 amount) internal virtual { + require(owner != address(0), "ERC20: approve from the zero address"); + require(spender != address(0), "ERC20: approve to the zero address"); + + _allowances[owner][spender] = amount; + emit Approval(owner, spender, amount); + } + + /** + * @dev Sets {decimals} to a value other than the default one of 18. + * + * WARNING: This function should only be called from the constructor. Most + * applications that interact with token contracts will not expect + * {decimals} to ever change, and may work incorrectly if it does. 
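+ * (Editor's note, hypothetical sketch not taken from the upstream source: a
+ * derived token with 6 decimals would call this once from its constructor, e.g.
+ * `constructor() public ERC20("Example", "EXM") { _setupDecimals(6); }`, where
+ * the name and symbol are placeholders.)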
+ */ + function _setupDecimals(uint8 decimals_) internal { + _decimals = decimals_; + } + + /** + * @dev Hook that is called before any transfer of tokens. This includes + * minting and burning. + * + * Calling conditions: + * + * - when `from` and `to` are both non-zero, `amount` of ``from``'s tokens + * will be to transferred to `to`. + * - when `from` is zero, `amount` tokens will be minted for `to`. + * - when `to` is zero, `amount` of ``from``'s tokens will be burned. + * - `from` and `to` are never both zero. + * + * To learn more about hooks, head to xref:ROOT:extending-contracts.adoc#using-hooks[Using Hooks]. + */ + function _beforeTokenTransfer(address from, address to, uint256 amount) internal virtual { } +} + +// File: @openzeppelin/contracts/token/ERC20/ERC20Burnable.sol + + +pragma solidity ^0.6.0; + + + +/** + * @dev Extension of {ERC20} that allows token holders to destroy both their own + * tokens and those that they have an allowance for, in a way that can be + * recognized off-chain (via event analysis). + */ +abstract contract ERC20Burnable is Context, ERC20 { + /** + * @dev Destroys `amount` tokens from the caller. + * + * See {ERC20-_burn}. + */ + function burn(uint256 amount) public virtual { + _burn(_msgSender(), amount); + } + + /** + * @dev Destroys `amount` tokens from `account`, deducting from the caller's + * allowance. + * + * See {ERC20-_burn} and {ERC20-allowance}. + * + * Requirements: + * + * - the caller must have allowance for ``accounts``'s tokens of at least + * `amount`. + */ + function burnFrom(address account, uint256 amount) public virtual { + uint256 decreasedAllowance = allowance(account, _msgSender()).sub(amount, "ERC20: burn amount exceeds allowance"); + + _approve(account, _msgSender(), decreasedAllowance); + _burn(account, amount); + } +} + +// File: @openzeppelin/contracts/utils/Pausable.sol + + +pragma solidity ^0.6.0; + + +/** + * @dev Contract module which allows children to implement an emergency stop + * mechanism that can be triggered by an authorized account. + * + * This module is used through inheritance. It will make available the + * modifiers `whenNotPaused` and `whenPaused`, which can be applied to + * the functions of your contract. Note that they will not be pausable by + * simply including this module, only once the modifiers are put in place. + */ +contract Pausable is Context { + /** + * @dev Emitted when the pause is triggered by `account`. + */ + event Paused(address account); + + /** + * @dev Emitted when the pause is lifted by `account`. + */ + event Unpaused(address account); + + bool private _paused; + + /** + * @dev Initializes the contract in unpaused state. + */ + constructor () internal { + _paused = false; + } + + /** + * @dev Returns true if the contract is paused, and false otherwise. + */ + function paused() public view returns (bool) { + return _paused; + } + + /** + * @dev Modifier to make a function callable only when the contract is not paused. + * + * Requirements: + * + * - The contract must not be paused. + */ + modifier whenNotPaused() { + require(!_paused, "Pausable: paused"); + _; + } + + /** + * @dev Modifier to make a function callable only when the contract is paused. + * + * Requirements: + * + * - The contract must be paused. + */ + modifier whenPaused() { + require(_paused, "Pausable: not paused"); + _; + } + + /** + * @dev Triggers stopped state. + * + * Requirements: + * + * - The contract must not be paused. 
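+ * (Editor's note, illustrative only: `_pause` is internal, so a derived contract
+ * exposes it behind its own access check, as the `pause()` function of
+ * {ERC20PresetMinterPauser} later in this flattened file does with `PAUSER_ROLE`.)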
+ */ + function _pause() internal virtual whenNotPaused { + _paused = true; + emit Paused(_msgSender()); + } + + /** + * @dev Returns to normal state. + * + * Requirements: + * + * - The contract must be paused. + */ + function _unpause() internal virtual whenPaused { + _paused = false; + emit Unpaused(_msgSender()); + } +} + +// File: @openzeppelin/contracts/token/ERC20/ERC20Pausable.sol + + +pragma solidity ^0.6.0; + + + +/** + * @dev ERC20 token with pausable token transfers, minting and burning. + * + * Useful for scenarios such as preventing trades until the end of an evaluation + * period, or having an emergency switch for freezing all token transfers in the + * event of a large bug. + */ +abstract contract ERC20Pausable is ERC20, Pausable { + /** + * @dev See {ERC20-_beforeTokenTransfer}. + * + * Requirements: + * + * - the contract must not be paused. + */ + function _beforeTokenTransfer(address from, address to, uint256 amount) internal virtual override { + super._beforeTokenTransfer(from, to, amount); + + require(!paused(), "ERC20Pausable: token transfer while paused"); + } +} + +// File: @openzeppelin/contracts/presets/ERC20PresetMinterPauser.sol + + +pragma solidity ^0.6.0; + + + + + + +/** + * @dev {ERC20} token, including: + * + * - ability for holders to burn (destroy) their tokens + * - a minter role that allows for token minting (creation) + * - a pauser role that allows to stop all token transfers + * + * This contract uses {AccessControl} to lock permissioned functions using the + * different roles - head to its documentation for details. + * + * The account that deploys the contract will be granted the minter and pauser + * roles, as well as the default admin role, which will let it grant both minter + * and pauser roles to other accounts. + */ +contract ERC20PresetMinterPauser is Context, AccessControl, ERC20Burnable, ERC20Pausable { + bytes32 public constant MINTER_ROLE = keccak256("MINTER_ROLE"); + bytes32 public constant PAUSER_ROLE = keccak256("PAUSER_ROLE"); + + /** + * @dev Grants `DEFAULT_ADMIN_ROLE`, `MINTER_ROLE` and `PAUSER_ROLE` to the + * account that deploys the contract. + * + * See {ERC20-constructor}. + */ + constructor(string memory name, string memory symbol) public ERC20(name, symbol) { + _setupRole(DEFAULT_ADMIN_ROLE, _msgSender()); + + _setupRole(MINTER_ROLE, _msgSender()); + _setupRole(PAUSER_ROLE, _msgSender()); + } + + /** + * @dev Creates `amount` new tokens for `to`. + * + * See {ERC20-_mint}. + * + * Requirements: + * + * - the caller must have the `MINTER_ROLE`. + */ + function mint(address to, uint256 amount) public virtual { + require(hasRole(MINTER_ROLE, _msgSender()), "ERC20PresetMinterPauser: must have minter role to mint"); + _mint(to, amount); + } + + /** + * @dev Pauses all token transfers. + * + * See {ERC20Pausable} and {Pausable-_pause}. + * + * Requirements: + * + * - the caller must have the `PAUSER_ROLE`. + */ + function pause() public virtual { + require(hasRole(PAUSER_ROLE, _msgSender()), "ERC20PresetMinterPauser: must have pauser role to pause"); + _pause(); + } + + /** + * @dev Unpauses all token transfers. + * + * See {ERC20Pausable} and {Pausable-_unpause}. + * + * Requirements: + * + * - the caller must have the `PAUSER_ROLE`. 
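+ * (Editor's note, hypothetical usage not taken from the upstream source: the
+ * deployer holds `DEFAULT_ADMIN_ROLE` and can delegate pausing with
+ * `grantRole(PAUSER_ROLE, opsAccount)`; `opsAccount` may then call `pause()`
+ * and `unpause()`. `opsAccount` is an illustrative address, not a name from
+ * this code.)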
+ */ + function unpause() public virtual { + require(hasRole(PAUSER_ROLE, _msgSender()), "ERC20PresetMinterPauser: must have pauser role to unpause"); + _unpause(); + } + + function _beforeTokenTransfer(address from, address to, uint256 amount) internal virtual override(ERC20, ERC20Pausable) { + super._beforeTokenTransfer(from, to, amount); + } +} + +// File: contracts/Token.sol + +pragma solidity ^0.6.2; diff --git a/contracts/flat/ChildERC20.sol b/contracts/flat/ChildERC20.sol new file mode 100644 index 0000000000..dc53d143da --- /dev/null +++ b/contracts/flat/ChildERC20.sol @@ -0,0 +1,1509 @@ + +// File: @openzeppelin/contracts/GSN/Context.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.0; + +/* + * @dev Provides information about the current execution context, including the + * sender of the transaction and its data. While these are generally available + * via msg.sender and msg.data, they should not be accessed in such a direct + * manner, since when dealing with GSN meta-transactions the account sending and + * paying for execution may not be the actual sender (as far as an application + * is concerned). + * + * This contract is only required for intermediate, library-like contracts. + */ +abstract contract Context { + function _msgSender() internal view virtual returns (address payable) { + return msg.sender; + } + + function _msgData() internal view virtual returns (bytes memory) { + this; // silence state mutability warning without generating bytecode - see https://github.com/ethereum/solidity/issues/2691 + return msg.data; + } +} + +// File: @openzeppelin/contracts/token/ERC20/IERC20.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.0; + +/** + * @dev Interface of the ERC20 standard as defined in the EIP. + */ +interface IERC20 { + /** + * @dev Returns the amount of tokens in existence. + */ + function totalSupply() external view returns (uint256); + + /** + * @dev Returns the amount of tokens owned by `account`. + */ + function balanceOf(address account) external view returns (uint256); + + /** + * @dev Moves `amount` tokens from the caller's account to `recipient`. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transfer(address recipient, uint256 amount) external returns (bool); + + /** + * @dev Returns the remaining number of tokens that `spender` will be + * allowed to spend on behalf of `owner` through {transferFrom}. This is + * zero by default. + * + * This value changes when {approve} or {transferFrom} are called. + */ + function allowance(address owner, address spender) external view returns (uint256); + + /** + * @dev Sets `amount` as the allowance of `spender` over the caller's tokens. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * IMPORTANT: Beware that changing an allowance with this method brings the risk + * that someone may use both the old and the new allowance by unfortunate + * transaction ordering. One possible solution to mitigate this race + * condition is to first reduce the spender's allowance to 0 and set the + * desired value afterwards: + * https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + * + * Emits an {Approval} event. + */ + function approve(address spender, uint256 amount) external returns (bool); + + /** + * @dev Moves `amount` tokens from `sender` to `recipient` using the + * allowance mechanism. `amount` is then deducted from the caller's + * allowance. 
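+ * (Editor's note, illustrative example: if `owner` calls `approve(spender, 100)`
+ * and `spender` then calls `transferFrom(owner, recipient, 60)`, 60 tokens move
+ * and the remaining allowance is 40.)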
+ * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transferFrom(address sender, address recipient, uint256 amount) external returns (bool); + + /** + * @dev Emitted when `value` tokens are moved from one account (`from`) to + * another (`to`). + * + * Note that `value` may be zero. + */ + event Transfer(address indexed from, address indexed to, uint256 value); + + /** + * @dev Emitted when the allowance of a `spender` for an `owner` is set by + * a call to {approve}. `value` is the new allowance. + */ + event Approval(address indexed owner, address indexed spender, uint256 value); +} + +// File: @openzeppelin/contracts/math/SafeMath.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.0; + +/** + * @dev Wrappers over Solidity's arithmetic operations with added overflow + * checks. + * + * Arithmetic operations in Solidity wrap on overflow. This can easily result + * in bugs, because programmers usually assume that an overflow raises an + * error, which is the standard behavior in high level programming languages. + * `SafeMath` restores this intuition by reverting the transaction when an + * operation overflows. + * + * Using this library instead of the unchecked operations eliminates an entire + * class of bugs, so it's recommended to use it always. + */ +library SafeMath { + /** + * @dev Returns the addition of two unsigned integers, reverting on + * overflow. + * + * Counterpart to Solidity's `+` operator. + * + * Requirements: + * + * - Addition cannot overflow. + */ + function add(uint256 a, uint256 b) internal pure returns (uint256) { + uint256 c = a + b; + require(c >= a, "SafeMath: addition overflow"); + + return c; + } + + /** + * @dev Returns the subtraction of two unsigned integers, reverting on + * overflow (when the result is negative). + * + * Counterpart to Solidity's `-` operator. + * + * Requirements: + * + * - Subtraction cannot overflow. + */ + function sub(uint256 a, uint256 b) internal pure returns (uint256) { + return sub(a, b, "SafeMath: subtraction overflow"); + } + + /** + * @dev Returns the subtraction of two unsigned integers, reverting with custom message on + * overflow (when the result is negative). + * + * Counterpart to Solidity's `-` operator. + * + * Requirements: + * + * - Subtraction cannot overflow. + */ + function sub(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b <= a, errorMessage); + uint256 c = a - b; + + return c; + } + + /** + * @dev Returns the multiplication of two unsigned integers, reverting on + * overflow. + * + * Counterpart to Solidity's `*` operator. + * + * Requirements: + * + * - Multiplication cannot overflow. + */ + function mul(uint256 a, uint256 b) internal pure returns (uint256) { + // Gas optimization: this is cheaper than requiring 'a' not being zero, but the + // benefit is lost if 'b' is also tested. + // See: https://github.com/OpenZeppelin/openzeppelin-contracts/pull/522 + if (a == 0) { + return 0; + } + + uint256 c = a * b; + require(c / a == b, "SafeMath: multiplication overflow"); + + return c; + } + + /** + * @dev Returns the integer division of two unsigned integers. Reverts on + * division by zero. The result is rounded towards zero. + * + * Counterpart to Solidity's `/` operator. Note: this function uses a + * `revert` opcode (which leaves remaining gas untouched) while Solidity + * uses an invalid opcode to revert (consuming all remaining gas). 
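+ * (Editor's note, illustrative example: the division truncates, so `a.div(b)`
+ * with `a = 7` and `b = 2` returns `3`; the remainder is available via {mod}.)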
+ * + * Requirements: + * + * - The divisor cannot be zero. + */ + function div(uint256 a, uint256 b) internal pure returns (uint256) { + return div(a, b, "SafeMath: division by zero"); + } + + /** + * @dev Returns the integer division of two unsigned integers. Reverts with custom message on + * division by zero. The result is rounded towards zero. + * + * Counterpart to Solidity's `/` operator. Note: this function uses a + * `revert` opcode (which leaves remaining gas untouched) while Solidity + * uses an invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function div(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b > 0, errorMessage); + uint256 c = a / b; + // assert(a == b * c + a % b); // There is no case in which this doesn't hold + + return c; + } + + /** + * @dev Returns the remainder of dividing two unsigned integers. (unsigned integer modulo), + * Reverts when dividing by zero. + * + * Counterpart to Solidity's `%` operator. This function uses a `revert` + * opcode (which leaves remaining gas untouched) while Solidity uses an + * invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function mod(uint256 a, uint256 b) internal pure returns (uint256) { + return mod(a, b, "SafeMath: modulo by zero"); + } + + /** + * @dev Returns the remainder of dividing two unsigned integers. (unsigned integer modulo), + * Reverts with custom message when dividing by zero. + * + * Counterpart to Solidity's `%` operator. This function uses a `revert` + * opcode (which leaves remaining gas untouched) while Solidity uses an + * invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function mod(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b != 0, errorMessage); + return a % b; + } +} + +// File: @openzeppelin/contracts/utils/Address.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.2; + +/** + * @dev Collection of functions related to the address type + */ +library Address { + /** + * @dev Returns true if `account` is a contract. + * + * [IMPORTANT] + * ==== + * It is unsafe to assume that an address for which this function returns + * false is an externally-owned account (EOA) and not a contract. + * + * Among others, `isContract` will return false for the following + * types of addresses: + * + * - an externally-owned account + * - a contract in construction + * - an address where a contract will be created + * - an address where a contract lived, but was destroyed + * ==== + */ + function isContract(address account) internal view returns (bool) { + // According to EIP-1052, 0x0 is the value returned for not-yet created accounts + // and 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470 is returned + // for accounts without code, i.e. `keccak256('')` + bytes32 codehash; + bytes32 accountHash = 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470; + // solhint-disable-next-line no-inline-assembly + assembly { codehash := extcodehash(account) } + return (codehash != accountHash && codehash != 0x0); + } + + /** + * @dev Replacement for Solidity's `transfer`: sends `amount` wei to + * `recipient`, forwarding all available gas and reverting on errors. 
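+ * (Editor's note, illustrative usage not taken from the upstream source: a
+ * contract holding ETH can forward it with
+ * `Address.sendValue(payable(recipient), amount)` rather than
+ * `recipient.transfer(amount)`.)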
+ * + * https://eips.ethereum.org/EIPS/eip-1884[EIP1884] increases the gas cost + * of certain opcodes, possibly making contracts go over the 2300 gas limit + * imposed by `transfer`, making them unable to receive funds via + * `transfer`. {sendValue} removes this limitation. + * + * https://diligence.consensys.net/posts/2019/09/stop-using-soliditys-transfer-now/[Learn more]. + * + * IMPORTANT: because control is transferred to `recipient`, care must be + * taken to not create reentrancy vulnerabilities. Consider using + * {ReentrancyGuard} or the + * https://solidity.readthedocs.io/en/v0.5.11/security-considerations.html#use-the-checks-effects-interactions-pattern[checks-effects-interactions pattern]. + */ + function sendValue(address payable recipient, uint256 amount) internal { + require(address(this).balance >= amount, "Address: insufficient balance"); + + // solhint-disable-next-line avoid-low-level-calls, avoid-call-value + (bool success, ) = recipient.call{ value: amount }(""); + require(success, "Address: unable to send value, recipient may have reverted"); + } + + /** + * @dev Performs a Solidity function call using a low level `call`. A + * plain`call` is an unsafe replacement for a function call: use this + * function instead. + * + * If `target` reverts with a revert reason, it is bubbled up by this + * function (like regular Solidity function calls). + * + * Returns the raw returned data. To convert to the expected return value, + * use https://solidity.readthedocs.io/en/latest/units-and-global-variables.html?highlight=abi.decode#abi-encoding-and-decoding-functions[`abi.decode`]. + * + * Requirements: + * + * - `target` must be a contract. + * - calling `target` with `data` must not revert. + * + * _Available since v3.1._ + */ + function functionCall(address target, bytes memory data) internal returns (bytes memory) { + return functionCall(target, data, "Address: low-level call failed"); + } + + /** + * @dev Same as {xref-Address-functionCall-address-bytes-}[`functionCall`], but with + * `errorMessage` as a fallback revert reason when `target` reverts. + * + * _Available since v3.1._ + */ + function functionCall(address target, bytes memory data, string memory errorMessage) internal returns (bytes memory) { + return _functionCallWithValue(target, data, 0, errorMessage); + } + + /** + * @dev Same as {xref-Address-functionCall-address-bytes-}[`functionCall`], + * but also transferring `value` wei to `target`. + * + * Requirements: + * + * - the calling contract must have an ETH balance of at least `value`. + * - the called Solidity function must be `payable`. + * + * _Available since v3.1._ + */ + function functionCallWithValue(address target, bytes memory data, uint256 value) internal returns (bytes memory) { + return functionCallWithValue(target, data, value, "Address: low-level call with value failed"); + } + + /** + * @dev Same as {xref-Address-functionCallWithValue-address-bytes-uint256-}[`functionCallWithValue`], but + * with `errorMessage` as a fallback revert reason when `target` reverts. 
+ * + * _Available since v3.1._ + */ + function functionCallWithValue(address target, bytes memory data, uint256 value, string memory errorMessage) internal returns (bytes memory) { + require(address(this).balance >= value, "Address: insufficient balance for call"); + return _functionCallWithValue(target, data, value, errorMessage); + } + + function _functionCallWithValue(address target, bytes memory data, uint256 weiValue, string memory errorMessage) private returns (bytes memory) { + require(isContract(target), "Address: call to non-contract"); + + // solhint-disable-next-line avoid-low-level-calls + (bool success, bytes memory returndata) = target.call{ value: weiValue }(data); + if (success) { + return returndata; + } else { + // Look for revert reason and bubble it up if present + if (returndata.length > 0) { + // The easiest way to bubble the revert reason is using memory via assembly + + // solhint-disable-next-line no-inline-assembly + assembly { + let returndata_size := mload(returndata) + revert(add(32, returndata), returndata_size) + } + } else { + revert(errorMessage); + } + } + } +} + +// File: @openzeppelin/contracts/token/ERC20/ERC20.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.0; + + + + + +/** + * @dev Implementation of the {IERC20} interface. + * + * This implementation is agnostic to the way tokens are created. This means + * that a supply mechanism has to be added in a derived contract using {_mint}. + * For a generic mechanism see {ERC20PresetMinterPauser}. + * + * TIP: For a detailed writeup see our guide + * https://forum.zeppelin.solutions/t/how-to-implement-erc20-supply-mechanisms/226[How + * to implement supply mechanisms]. + * + * We have followed general OpenZeppelin guidelines: functions revert instead + * of returning `false` on failure. This behavior is nonetheless conventional + * and does not conflict with the expectations of ERC20 applications. + * + * Additionally, an {Approval} event is emitted on calls to {transferFrom}. + * This allows applications to reconstruct the allowance for all accounts just + * by listening to said events. Other implementations of the EIP may not emit + * these events, as it isn't required by the specification. + * + * Finally, the non-standard {decreaseAllowance} and {increaseAllowance} + * functions have been added to mitigate the well-known issues around setting + * allowances. See {IERC20-approve}. + */ +contract ERC20 is Context, IERC20 { + using SafeMath for uint256; + using Address for address; + + mapping (address => uint256) private _balances; + + mapping (address => mapping (address => uint256)) private _allowances; + + uint256 private _totalSupply; + + string private _name; + string private _symbol; + uint8 private _decimals; + + /** + * @dev Sets the values for {name} and {symbol}, initializes {decimals} with + * a default value of 18. + * + * To select a different value for {decimals}, use {_setupDecimals}. + * + * All three of these values are immutable: they can only be set once during + * construction. + */ + constructor (string memory name, string memory symbol) public { + _name = name; + _symbol = symbol; + _decimals = 18; + } + + /** + * @dev Returns the name of the token. + */ + function name() public view returns (string memory) { + return _name; + } + + /** + * @dev Returns the symbol of the token, usually a shorter version of the + * name. 
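+ * (Editor's note, illustrative example: a token named "Example Token" might use
+ * the symbol "EXT"; both are placeholder values and, like `name`, are fixed at
+ * construction.)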
+ */ + function symbol() public view returns (string memory) { + return _symbol; + } + + /** + * @dev Returns the number of decimals used to get its user representation. + * For example, if `decimals` equals `2`, a balance of `505` tokens should + * be displayed to a user as `5,05` (`505 / 10 ** 2`). + * + * Tokens usually opt for a value of 18, imitating the relationship between + * Ether and Wei. This is the value {ERC20} uses, unless {_setupDecimals} is + * called. + * + * NOTE: This information is only used for _display_ purposes: it in + * no way affects any of the arithmetic of the contract, including + * {IERC20-balanceOf} and {IERC20-transfer}. + */ + function decimals() public view returns (uint8) { + return _decimals; + } + + /** + * @dev See {IERC20-totalSupply}. + */ + function totalSupply() public view override returns (uint256) { + return _totalSupply; + } + + /** + * @dev See {IERC20-balanceOf}. + */ + function balanceOf(address account) public view override returns (uint256) { + return _balances[account]; + } + + /** + * @dev See {IERC20-transfer}. + * + * Requirements: + * + * - `recipient` cannot be the zero address. + * - the caller must have a balance of at least `amount`. + */ + function transfer(address recipient, uint256 amount) public virtual override returns (bool) { + _transfer(_msgSender(), recipient, amount); + return true; + } + + /** + * @dev See {IERC20-allowance}. + */ + function allowance(address owner, address spender) public view virtual override returns (uint256) { + return _allowances[owner][spender]; + } + + /** + * @dev See {IERC20-approve}. + * + * Requirements: + * + * - `spender` cannot be the zero address. + */ + function approve(address spender, uint256 amount) public virtual override returns (bool) { + _approve(_msgSender(), spender, amount); + return true; + } + + /** + * @dev See {IERC20-transferFrom}. + * + * Emits an {Approval} event indicating the updated allowance. This is not + * required by the EIP. See the note at the beginning of {ERC20}; + * + * Requirements: + * - `sender` and `recipient` cannot be the zero address. + * - `sender` must have a balance of at least `amount`. + * - the caller must have allowance for ``sender``'s tokens of at least + * `amount`. + */ + function transferFrom(address sender, address recipient, uint256 amount) public virtual override returns (bool) { + _transfer(sender, recipient, amount); + _approve(sender, _msgSender(), _allowances[sender][_msgSender()].sub(amount, "ERC20: transfer amount exceeds allowance")); + return true; + } + + /** + * @dev Atomically increases the allowance granted to `spender` by the caller. + * + * This is an alternative to {approve} that can be used as a mitigation for + * problems described in {IERC20-approve}. + * + * Emits an {Approval} event indicating the updated allowance. + * + * Requirements: + * + * - `spender` cannot be the zero address. + */ + function increaseAllowance(address spender, uint256 addedValue) public virtual returns (bool) { + _approve(_msgSender(), spender, _allowances[_msgSender()][spender].add(addedValue)); + return true; + } + + /** + * @dev Atomically decreases the allowance granted to `spender` by the caller. + * + * This is an alternative to {approve} that can be used as a mitigation for + * problems described in {IERC20-approve}. + * + * Emits an {Approval} event indicating the updated allowance. + * + * Requirements: + * + * - `spender` cannot be the zero address. + * - `spender` must have allowance for the caller of at least + * `subtractedValue`. 
+ */ + function decreaseAllowance(address spender, uint256 subtractedValue) public virtual returns (bool) { + _approve(_msgSender(), spender, _allowances[_msgSender()][spender].sub(subtractedValue, "ERC20: decreased allowance below zero")); + return true; + } + + /** + * @dev Moves tokens `amount` from `sender` to `recipient`. + * + * This is internal function is equivalent to {transfer}, and can be used to + * e.g. implement automatic token fees, slashing mechanisms, etc. + * + * Emits a {Transfer} event. + * + * Requirements: + * + * - `sender` cannot be the zero address. + * - `recipient` cannot be the zero address. + * - `sender` must have a balance of at least `amount`. + */ + function _transfer(address sender, address recipient, uint256 amount) internal virtual { + require(sender != address(0), "ERC20: transfer from the zero address"); + require(recipient != address(0), "ERC20: transfer to the zero address"); + + _beforeTokenTransfer(sender, recipient, amount); + + _balances[sender] = _balances[sender].sub(amount, "ERC20: transfer amount exceeds balance"); + _balances[recipient] = _balances[recipient].add(amount); + emit Transfer(sender, recipient, amount); + } + + /** @dev Creates `amount` tokens and assigns them to `account`, increasing + * the total supply. + * + * Emits a {Transfer} event with `from` set to the zero address. + * + * Requirements + * + * - `to` cannot be the zero address. + */ + function _mint(address account, uint256 amount) internal virtual { + require(account != address(0), "ERC20: mint to the zero address"); + + _beforeTokenTransfer(address(0), account, amount); + + _totalSupply = _totalSupply.add(amount); + _balances[account] = _balances[account].add(amount); + emit Transfer(address(0), account, amount); + } + + /** + * @dev Destroys `amount` tokens from `account`, reducing the + * total supply. + * + * Emits a {Transfer} event with `to` set to the zero address. + * + * Requirements + * + * - `account` cannot be the zero address. + * - `account` must have at least `amount` tokens. + */ + function _burn(address account, uint256 amount) internal virtual { + require(account != address(0), "ERC20: burn from the zero address"); + + _beforeTokenTransfer(account, address(0), amount); + + _balances[account] = _balances[account].sub(amount, "ERC20: burn amount exceeds balance"); + _totalSupply = _totalSupply.sub(amount); + emit Transfer(account, address(0), amount); + } + + /** + * @dev Sets `amount` as the allowance of `spender` over the `owner`s tokens. + * + * This is internal function is equivalent to `approve`, and can be used to + * e.g. set automatic allowances for certain subsystems, etc. + * + * Emits an {Approval} event. + * + * Requirements: + * + * - `owner` cannot be the zero address. + * - `spender` cannot be the zero address. + */ + function _approve(address owner, address spender, uint256 amount) internal virtual { + require(owner != address(0), "ERC20: approve from the zero address"); + require(spender != address(0), "ERC20: approve to the zero address"); + + _allowances[owner][spender] = amount; + emit Approval(owner, spender, amount); + } + + /** + * @dev Sets {decimals} to a value other than the default one of 18. + * + * WARNING: This function should only be called from the constructor. Most + * applications that interact with token contracts will not expect + * {decimals} to ever change, and may work incorrectly if it does. 
+ */ + function _setupDecimals(uint8 decimals_) internal { + _decimals = decimals_; + } + + /** + * @dev Hook that is called before any transfer of tokens. This includes + * minting and burning. + * + * Calling conditions: + * + * - when `from` and `to` are both non-zero, `amount` of ``from``'s tokens + * will be to transferred to `to`. + * - when `from` is zero, `amount` tokens will be minted for `to`. + * - when `to` is zero, `amount` of ``from``'s tokens will be burned. + * - `from` and `to` are never both zero. + * + * To learn more about hooks, head to xref:ROOT:extending-contracts.adoc#using-hooks[Using Hooks]. + */ + function _beforeTokenTransfer(address from, address to, uint256 amount) internal virtual { } +} + +// File: @openzeppelin/contracts/utils/EnumerableSet.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.0; + +/** + * @dev Library for managing + * https://en.wikipedia.org/wiki/Set_(abstract_data_type)[sets] of primitive + * types. + * + * Sets have the following properties: + * + * - Elements are added, removed, and checked for existence in constant time + * (O(1)). + * - Elements are enumerated in O(n). No guarantees are made on the ordering. + * + * ``` + * contract Example { + * // Add the library methods + * using EnumerableSet for EnumerableSet.AddressSet; + * + * // Declare a set state variable + * EnumerableSet.AddressSet private mySet; + * } + * ``` + * + * As of v3.0.0, only sets of type `address` (`AddressSet`) and `uint256` + * (`UintSet`) are supported. + */ +library EnumerableSet { + // To implement this library for multiple types with as little code + // repetition as possible, we write it in terms of a generic Set type with + // bytes32 values. + // The Set implementation uses private functions, and user-facing + // implementations (such as AddressSet) are just wrappers around the + // underlying Set. + // This means that we can only create new EnumerableSets for types that fit + // in bytes32. + + struct Set { + // Storage of set values + bytes32[] _values; + + // Position of the value in the `values` array, plus 1 because index 0 + // means a value is not in the set. + mapping (bytes32 => uint256) _indexes; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function _add(Set storage set, bytes32 value) private returns (bool) { + if (!_contains(set, value)) { + set._values.push(value); + // The value is stored at length-1, but we add 1 to all indexes + // and use 0 as a sentinel value + set._indexes[value] = set._values.length; + return true; + } else { + return false; + } + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. + */ + function _remove(Set storage set, bytes32 value) private returns (bool) { + // We read and store the value's index to prevent multiple reads from the same storage slot + uint256 valueIndex = set._indexes[value]; + + if (valueIndex != 0) { // Equivalent to contains(set, value) + // To delete an element from the _values array in O(1), we swap the element to delete with the last one in + // the array, and then remove the last element (sometimes called as 'swap and pop'). + // This modifies the order of the array, as noted in {at}. + + uint256 toDeleteIndex = valueIndex - 1; + uint256 lastIndex = set._values.length - 1; + + // When the value to delete is the last one, the swap operation is unnecessary. 
However, since this occurs + // so rarely, we still do the swap anyway to avoid the gas cost of adding an 'if' statement. + + bytes32 lastvalue = set._values[lastIndex]; + + // Move the last value to the index where the value to delete is + set._values[toDeleteIndex] = lastvalue; + // Update the index for the moved value + set._indexes[lastvalue] = toDeleteIndex + 1; // All indexes are 1-based + + // Delete the slot where the moved value was stored + set._values.pop(); + + // Delete the index for the deleted slot + delete set._indexes[value]; + + return true; + } else { + return false; + } + } + + /** + * @dev Returns true if the value is in the set. O(1). + */ + function _contains(Set storage set, bytes32 value) private view returns (bool) { + return set._indexes[value] != 0; + } + + /** + * @dev Returns the number of values on the set. O(1). + */ + function _length(Set storage set) private view returns (uint256) { + return set._values.length; + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function _at(Set storage set, uint256 index) private view returns (bytes32) { + require(set._values.length > index, "EnumerableSet: index out of bounds"); + return set._values[index]; + } + + // AddressSet + + struct AddressSet { + Set _inner; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function add(AddressSet storage set, address value) internal returns (bool) { + return _add(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. + */ + function remove(AddressSet storage set, address value) internal returns (bool) { + return _remove(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Returns true if the value is in the set. O(1). + */ + function contains(AddressSet storage set, address value) internal view returns (bool) { + return _contains(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Returns the number of values in the set. O(1). + */ + function length(AddressSet storage set) internal view returns (uint256) { + return _length(set._inner); + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function at(AddressSet storage set, uint256 index) internal view returns (address) { + return address(uint256(_at(set._inner, index))); + } + + + // UintSet + + struct UintSet { + Set _inner; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function add(UintSet storage set, uint256 value) internal returns (bool) { + return _add(set._inner, bytes32(value)); + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. 
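+ * Editor's note, hypothetical usage sketch not taken from the upstream source
+ * (`ids` is an illustrative state variable):
+ *
+ * ```
+ * using EnumerableSet for EnumerableSet.UintSet;
+ * EnumerableSet.UintSet private ids;
+ *
+ * ids.add(42);      // returns true, 42 was not present
+ * ids.remove(42);   // returns true, 42 was present
+ * ```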
+ */ + function remove(UintSet storage set, uint256 value) internal returns (bool) { + return _remove(set._inner, bytes32(value)); + } + + /** + * @dev Returns true if the value is in the set. O(1). + */ + function contains(UintSet storage set, uint256 value) internal view returns (bool) { + return _contains(set._inner, bytes32(value)); + } + + /** + * @dev Returns the number of values on the set. O(1). + */ + function length(UintSet storage set) internal view returns (uint256) { + return _length(set._inner); + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function at(UintSet storage set, uint256 index) internal view returns (uint256) { + return uint256(_at(set._inner, index)); + } +} + +// File: @openzeppelin/contracts/access/AccessControl.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.0; + + + + +/** + * @dev Contract module that allows children to implement role-based access + * control mechanisms. + * + * Roles are referred to by their `bytes32` identifier. These should be exposed + * in the external API and be unique. The best way to achieve this is by + * using `public constant` hash digests: + * + * ``` + * bytes32 public constant MY_ROLE = keccak256("MY_ROLE"); + * ``` + * + * Roles can be used to represent a set of permissions. To restrict access to a + * function call, use {hasRole}: + * + * ``` + * function foo() public { + * require(hasRole(MY_ROLE, msg.sender)); + * ... + * } + * ``` + * + * Roles can be granted and revoked dynamically via the {grantRole} and + * {revokeRole} functions. Each role has an associated admin role, and only + * accounts that have a role's admin role can call {grantRole} and {revokeRole}. + * + * By default, the admin role for all roles is `DEFAULT_ADMIN_ROLE`, which means + * that only accounts with this role will be able to grant or revoke other + * roles. More complex role relationships can be created by using + * {_setRoleAdmin}. + * + * WARNING: The `DEFAULT_ADMIN_ROLE` is also its own admin: it has permission to + * grant and revoke this role. Extra precautions should be taken to secure + * accounts that have been granted it. + */ +abstract contract AccessControl is Context { + using EnumerableSet for EnumerableSet.AddressSet; + using Address for address; + + struct RoleData { + EnumerableSet.AddressSet members; + bytes32 adminRole; + } + + mapping (bytes32 => RoleData) private _roles; + + bytes32 public constant DEFAULT_ADMIN_ROLE = 0x00; + + /** + * @dev Emitted when `newAdminRole` is set as ``role``'s admin role, replacing `previousAdminRole` + * + * `DEFAULT_ADMIN_ROLE` is the starting admin for all roles, despite + * {RoleAdminChanged} not being emitted signaling this. + * + * _Available since v3.1._ + */ + event RoleAdminChanged(bytes32 indexed role, bytes32 indexed previousAdminRole, bytes32 indexed newAdminRole); + + /** + * @dev Emitted when `account` is granted `role`. + * + * `sender` is the account that originated the contract call, an admin role + * bearer except when using {_setupRole}. + */ + event RoleGranted(bytes32 indexed role, address indexed account, address indexed sender); + + /** + * @dev Emitted when `account` is revoked `role`. 
+ * + * `sender` is the account that originated the contract call: + * - if using `revokeRole`, it is the admin role bearer + * - if using `renounceRole`, it is the role bearer (i.e. `account`) + */ + event RoleRevoked(bytes32 indexed role, address indexed account, address indexed sender); + + /** + * @dev Returns `true` if `account` has been granted `role`. + */ + function hasRole(bytes32 role, address account) public view returns (bool) { + return _roles[role].members.contains(account); + } + + /** + * @dev Returns the number of accounts that have `role`. Can be used + * together with {getRoleMember} to enumerate all bearers of a role. + */ + function getRoleMemberCount(bytes32 role) public view returns (uint256) { + return _roles[role].members.length(); + } + + /** + * @dev Returns one of the accounts that have `role`. `index` must be a + * value between 0 and {getRoleMemberCount}, non-inclusive. + * + * Role bearers are not sorted in any particular way, and their ordering may + * change at any point. + * + * WARNING: When using {getRoleMember} and {getRoleMemberCount}, make sure + * you perform all queries on the same block. See the following + * https://forum.openzeppelin.com/t/iterating-over-elements-on-enumerableset-in-openzeppelin-contracts/2296[forum post] + * for more information. + */ + function getRoleMember(bytes32 role, uint256 index) public view returns (address) { + return _roles[role].members.at(index); + } + + /** + * @dev Returns the admin role that controls `role`. See {grantRole} and + * {revokeRole}. + * + * To change a role's admin, use {_setRoleAdmin}. + */ + function getRoleAdmin(bytes32 role) public view returns (bytes32) { + return _roles[role].adminRole; + } + + /** + * @dev Grants `role` to `account`. + * + * If `account` had not been already granted `role`, emits a {RoleGranted} + * event. + * + * Requirements: + * + * - the caller must have ``role``'s admin role. + */ + function grantRole(bytes32 role, address account) public virtual { + require(hasRole(_roles[role].adminRole, _msgSender()), "AccessControl: sender must be an admin to grant"); + + _grantRole(role, account); + } + + /** + * @dev Revokes `role` from `account`. + * + * If `account` had been granted `role`, emits a {RoleRevoked} event. + * + * Requirements: + * + * - the caller must have ``role``'s admin role. + */ + function revokeRole(bytes32 role, address account) public virtual { + require(hasRole(_roles[role].adminRole, _msgSender()), "AccessControl: sender must be an admin to revoke"); + + _revokeRole(role, account); + } + + /** + * @dev Revokes `role` from the calling account. + * + * Roles are often managed via {grantRole} and {revokeRole}: this function's + * purpose is to provide a mechanism for accounts to lose their privileges + * if they are compromised (such as when a trusted device is misplaced). + * + * If the calling account had been granted `role`, emits a {RoleRevoked} + * event. + * + * Requirements: + * + * - the caller must be `account`. + */ + function renounceRole(bytes32 role, address account) public virtual { + require(account == _msgSender(), "AccessControl: can only renounce roles for self"); + + _revokeRole(role, account); + } + + /** + * @dev Grants `role` to `account`. + * + * If `account` had not been already granted `role`, emits a {RoleGranted} + * event. Note that unlike {grantRole}, this function doesn't perform any + * checks on the calling account. 
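+ * (Editor's note, illustrative example: the {ERC20PresetMinterPauser} constructor
+ * elsewhere in this changeset calls `_setupRole(DEFAULT_ADMIN_ROLE, _msgSender())`
+ * to bootstrap the deployer as admin before any role admin exists.)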
+ * + * [WARNING] + * ==== + * This function should only be called from the constructor when setting + * up the initial roles for the system. + * + * Using this function in any other way is effectively circumventing the admin + * system imposed by {AccessControl}. + * ==== + */ + function _setupRole(bytes32 role, address account) internal virtual { + _grantRole(role, account); + } + + /** + * @dev Sets `adminRole` as ``role``'s admin role. + * + * Emits a {RoleAdminChanged} event. + */ + function _setRoleAdmin(bytes32 role, bytes32 adminRole) internal virtual { + emit RoleAdminChanged(role, _roles[role].adminRole, adminRole); + _roles[role].adminRole = adminRole; + } + + function _grantRole(bytes32 role, address account) private { + if (_roles[role].members.add(account)) { + emit RoleGranted(role, account, _msgSender()); + } + } + + function _revokeRole(bytes32 role, address account) private { + if (_roles[role].members.remove(account)) { + emit RoleRevoked(role, account, _msgSender()); + } + } +} + +// File: contracts/common/AccessControlMixin.sol + +pragma solidity 0.6.6; + + +contract AccessControlMixin is AccessControl { + string private _revertMsg; + function _setupContractId(string memory contractId) internal { + _revertMsg = string(abi.encodePacked(contractId, ": INSUFFICIENT_PERMISSIONS")); + } + + modifier only(bytes32 role) { + require( + hasRole(role, _msgSender()), + _revertMsg + ); + _; + } +} + +// File: contracts/child/ChildToken/IChildToken.sol + +pragma solidity 0.6.6; + +interface IChildToken { + function deposit(address user, bytes calldata depositData) external; +} + +// File: contracts/common/Initializable.sol + +pragma solidity 0.6.6; + +contract Initializable { + bool inited = false; + + modifier initializer() { + require(!inited, "already inited"); + _; + inited = true; + } +} + +// File: contracts/common/EIP712Base.sol + +pragma solidity 0.6.6; + + +contract EIP712Base is Initializable { + struct EIP712Domain { + string name; + string version; + address verifyingContract; + bytes32 salt; + } + + string constant public ERC712_VERSION = "1"; + + bytes32 internal constant EIP712_DOMAIN_TYPEHASH = keccak256( + bytes( + "EIP712Domain(string name,string version,address verifyingContract,bytes32 salt)" + ) + ); + bytes32 internal domainSeperator; + + // supposed to be called once while initializing. 
+ // one of the contractsa that inherits this contract follows proxy pattern + // so it is not possible to do this in a constructor + function _initializeEIP712( + string memory name + ) + internal + initializer + { + _setDomainSeperator(name); + } + + function _setDomainSeperator(string memory name) internal { + domainSeperator = keccak256( + abi.encode( + EIP712_DOMAIN_TYPEHASH, + keccak256(bytes(name)), + keccak256(bytes(ERC712_VERSION)), + address(this), + bytes32(getChainId()) + ) + ); + } + + function getDomainSeperator() public view returns (bytes32) { + return domainSeperator; + } + + function getChainId() public pure returns (uint256) { + uint256 id; + assembly { + id := chainid() + } + return id; + } + + /** + * Accept message hash and returns hash message in EIP712 compatible form + * So that it can be used to recover signer from signature signed using EIP712 formatted data + * https://eips.ethereum.org/EIPS/eip-712 + * "\\x19" makes the encoding deterministic + * "\\x01" is the version byte to make it compatible to EIP-191 + */ + function toTypedMessageHash(bytes32 messageHash) + internal + view + returns (bytes32) + { + return + keccak256( + abi.encodePacked("\x19\x01", getDomainSeperator(), messageHash) + ); + } +} + +// File: contracts/common/NativeMetaTransaction.sol + +pragma solidity 0.6.6; + + + +contract NativeMetaTransaction is EIP712Base { + using SafeMath for uint256; + bytes32 private constant META_TRANSACTION_TYPEHASH = keccak256( + bytes( + "MetaTransaction(uint256 nonce,address from,bytes functionSignature)" + ) + ); + event MetaTransactionExecuted( + address userAddress, + address payable relayerAddress, + bytes functionSignature + ); + mapping(address => uint256) nonces; + + /* + * Meta transaction structure. + * No point of including value field here as if user is doing value transfer then he has the funds to pay for gas + * He should call the desired function directly in that case. 
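+     *
+     * For illustration, the signature passed to executeMetaTransaction must cover
+     * the EIP-712 digest of this struct, which (mirroring toTypedMessageHash and
+     * verify further below) is built roughly as:
+     *
+     * ```
+     * bytes32 digest = keccak256(abi.encodePacked(
+     *     "\x19\x01",
+     *     getDomainSeperator(),
+     *     hashMetaTransaction(metaTx)
+     * ));
+     * // verify() then requires ecrecover(digest, sigV, sigR, sigS) == signer
+     * ```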
+ */ + struct MetaTransaction { + uint256 nonce; + address from; + bytes functionSignature; + } + + function executeMetaTransaction( + address userAddress, + bytes memory functionSignature, + bytes32 sigR, + bytes32 sigS, + uint8 sigV + ) public payable returns (bytes memory) { + MetaTransaction memory metaTx = MetaTransaction({ + nonce: nonces[userAddress], + from: userAddress, + functionSignature: functionSignature + }); + + require( + verify(userAddress, metaTx, sigR, sigS, sigV), + "Signer and signature do not match" + ); + + // increase nonce for user (to avoid re-use) + nonces[userAddress] = nonces[userAddress].add(1); + + emit MetaTransactionExecuted( + userAddress, + msg.sender, + functionSignature + ); + + // Append userAddress and relayer address at the end to extract it from calling context + (bool success, bytes memory returnData) = address(this).call( + abi.encodePacked(functionSignature, userAddress) + ); + require(success, "Function call not successful"); + + return returnData; + } + + function hashMetaTransaction(MetaTransaction memory metaTx) + internal + pure + returns (bytes32) + { + return + keccak256( + abi.encode( + META_TRANSACTION_TYPEHASH, + metaTx.nonce, + metaTx.from, + keccak256(metaTx.functionSignature) + ) + ); + } + + function getNonce(address user) public view returns (uint256 nonce) { + nonce = nonces[user]; + } + + function verify( + address signer, + MetaTransaction memory metaTx, + bytes32 sigR, + bytes32 sigS, + uint8 sigV + ) internal view returns (bool) { + require(signer != address(0), "NativeMetaTransaction: INVALID_SIGNER"); + return + signer == + ecrecover( + toTypedMessageHash(hashMetaTransaction(metaTx)), + sigV, + sigR, + sigS + ); + } +} + +// File: contracts/common/ContextMixin.sol + +pragma solidity 0.6.6; + +abstract contract ContextMixin { + function msgSender() + internal + view + returns (address payable sender) + { + if (msg.sender == address(this)) { + bytes memory array = msg.data; + uint256 index = msg.data.length; + assembly { + // Load the 32 bytes word from memory with the address on the lower 20 bytes, and mask those. 
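+                // `array` points at the length word of the copied calldata, so
+                // add(array, index), i.e. array + msg.data.length, is where its last
+                // 32 bytes begin; executeMetaTransaction appended the signer there,
+                // and the 160-bit mask keeps only those low 20 address bytes.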
+ sender := and( + mload(add(array, index)), + 0xffffffffffffffffffffffffffffffffffffffff + ) + } + } else { + sender = msg.sender; + } + return sender; + } +} + +// File: contracts/child/ChildToken/ChildERC20.sol + +pragma solidity 0.6.6; + + + + + + + +contract ChildERC20 is + ERC20, + IChildToken, + AccessControlMixin, + NativeMetaTransaction, + ContextMixin +{ + bytes32 public constant DEPOSITOR_ROLE = keccak256("DEPOSITOR_ROLE"); + + constructor( + string memory name_, + string memory symbol_, + uint8 decimals_, + address childChainManager + ) public ERC20(name_, symbol_) { + _setupContractId("ChildERC20"); + _setupDecimals(decimals_); + _setupRole(DEFAULT_ADMIN_ROLE, _msgSender()); + _setupRole(DEPOSITOR_ROLE, childChainManager); + _initializeEIP712(name_); + } + + // This is to support Native meta transactions + // never use msg.sender directly, use _msgSender() instead + function _msgSender() + internal + override + view + returns (address payable sender) + { + return ContextMixin.msgSender(); + } + + /** + * @notice called when token is deposited on root chain + * @dev Should be callable only by ChildChainManager + * Should handle deposit by minting the required amount for user + * Make sure minting is done only by this function + * @param user user address for whom deposit is being done + * @param depositData abi encoded amount + */ + function deposit(address user, bytes calldata depositData) + external + override + only(DEPOSITOR_ROLE) + { + uint256 amount = abi.decode(depositData, (uint256)); + _mint(user, amount); + } + + /** + * @notice called when user wants to withdraw tokens back to root chain + * @dev Should burn user's tokens. This transaction will be verified when exiting on root chain + * @param amount amount of tokens to withdraw + */ + function withdraw(uint256 amount) external { + _burn(_msgSender(), amount); + } +} diff --git a/contracts/flat/ChildMintableERC20.sol b/contracts/flat/ChildMintableERC20.sol new file mode 100644 index 0000000000..da34b5c6f0 --- /dev/null +++ b/contracts/flat/ChildMintableERC20.sol @@ -0,0 +1,1508 @@ + +// File: @openzeppelin/contracts/GSN/Context.sol + +// SPDX-License-Identifier: MIT + +pragma solidity ^0.6.0; + +/* + * @dev Provides information about the current execution context, including the + * sender of the transaction and its data. While these are generally available + * via msg.sender and msg.data, they should not be accessed in such a direct + * manner, since when dealing with GSN meta-transactions the account sending and + * paying for execution may not be the actual sender (as far as an application + * is concerned). + * + * This contract is only required for intermediate, library-like contracts. + */ +abstract contract Context { + function _msgSender() internal view virtual returns (address payable) { + return msg.sender; + } + + function _msgData() internal view virtual returns (bytes memory) { + this; // silence state mutability warning without generating bytecode - see https://github.com/ethereum/solidity/issues/2691 + return msg.data; + } +} + +// File: @openzeppelin/contracts/token/ERC20/IERC20.sol + +pragma solidity ^0.6.0; + +/** + * @dev Interface of the ERC20 standard as defined in the EIP. + */ +interface IERC20 { + /** + * @dev Returns the amount of tokens in existence. + */ + function totalSupply() external view returns (uint256); + + /** + * @dev Returns the amount of tokens owned by `account`. 
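+     *
+     * For illustration (placeholder names), a caller reads a balance through this
+     * interface as `uint256 bal = IERC20(token).balanceOf(holder);`.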
+ */ + function balanceOf(address account) external view returns (uint256); + + /** + * @dev Moves `amount` tokens from the caller's account to `recipient`. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transfer(address recipient, uint256 amount) external returns (bool); + + /** + * @dev Returns the remaining number of tokens that `spender` will be + * allowed to spend on behalf of `owner` through {transferFrom}. This is + * zero by default. + * + * This value changes when {approve} or {transferFrom} are called. + */ + function allowance(address owner, address spender) external view returns (uint256); + + /** + * @dev Sets `amount` as the allowance of `spender` over the caller's tokens. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * IMPORTANT: Beware that changing an allowance with this method brings the risk + * that someone may use both the old and the new allowance by unfortunate + * transaction ordering. One possible solution to mitigate this race + * condition is to first reduce the spender's allowance to 0 and set the + * desired value afterwards: + * https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + * + * Emits an {Approval} event. + */ + function approve(address spender, uint256 amount) external returns (bool); + + /** + * @dev Moves `amount` tokens from `sender` to `recipient` using the + * allowance mechanism. `amount` is then deducted from the caller's + * allowance. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transferFrom(address sender, address recipient, uint256 amount) external returns (bool); + + /** + * @dev Emitted when `value` tokens are moved from one account (`from`) to + * another (`to`). + * + * Note that `value` may be zero. + */ + event Transfer(address indexed from, address indexed to, uint256 value); + + /** + * @dev Emitted when the allowance of a `spender` for an `owner` is set by + * a call to {approve}. `value` is the new allowance. + */ + event Approval(address indexed owner, address indexed spender, uint256 value); +} + +// File: @openzeppelin/contracts/math/SafeMath.sol + +pragma solidity ^0.6.0; + +/** + * @dev Wrappers over Solidity's arithmetic operations with added overflow + * checks. + * + * Arithmetic operations in Solidity wrap on overflow. This can easily result + * in bugs, because programmers usually assume that an overflow raises an + * error, which is the standard behavior in high level programming languages. + * `SafeMath` restores this intuition by reverting the transaction when an + * operation overflows. + * + * Using this library instead of the unchecked operations eliminates an entire + * class of bugs, so it's recommended to use it always. + */ +library SafeMath { + /** + * @dev Returns the addition of two unsigned integers, reverting on + * overflow. + * + * Counterpart to Solidity's `+` operator. + * + * Requirements: + * + * - Addition cannot overflow. + */ + function add(uint256 a, uint256 b) internal pure returns (uint256) { + uint256 c = a + b; + require(c >= a, "SafeMath: addition overflow"); + + return c; + } + + /** + * @dev Returns the subtraction of two unsigned integers, reverting on + * overflow (when the result is negative). + * + * Counterpart to Solidity's `-` operator. + * + * Requirements: + * + * - Subtraction cannot overflow. 
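+     *
+     * For illustration, with `using SafeMath for uint256;` (as the ERC20 contract
+     * below declares), a checked balance update can be written as:
+     *
+     * ```
+     * _balances[sender] = _balances[sender].sub(amount); // reverts if amount > balance
+     * ```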
+ */ + function sub(uint256 a, uint256 b) internal pure returns (uint256) { + return sub(a, b, "SafeMath: subtraction overflow"); + } + + /** + * @dev Returns the subtraction of two unsigned integers, reverting with custom message on + * overflow (when the result is negative). + * + * Counterpart to Solidity's `-` operator. + * + * Requirements: + * + * - Subtraction cannot overflow. + */ + function sub(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b <= a, errorMessage); + uint256 c = a - b; + + return c; + } + + /** + * @dev Returns the multiplication of two unsigned integers, reverting on + * overflow. + * + * Counterpart to Solidity's `*` operator. + * + * Requirements: + * + * - Multiplication cannot overflow. + */ + function mul(uint256 a, uint256 b) internal pure returns (uint256) { + // Gas optimization: this is cheaper than requiring 'a' not being zero, but the + // benefit is lost if 'b' is also tested. + // See: https://github.com/OpenZeppelin/openzeppelin-contracts/pull/522 + if (a == 0) { + return 0; + } + + uint256 c = a * b; + require(c / a == b, "SafeMath: multiplication overflow"); + + return c; + } + + /** + * @dev Returns the integer division of two unsigned integers. Reverts on + * division by zero. The result is rounded towards zero. + * + * Counterpart to Solidity's `/` operator. Note: this function uses a + * `revert` opcode (which leaves remaining gas untouched) while Solidity + * uses an invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function div(uint256 a, uint256 b) internal pure returns (uint256) { + return div(a, b, "SafeMath: division by zero"); + } + + /** + * @dev Returns the integer division of two unsigned integers. Reverts with custom message on + * division by zero. The result is rounded towards zero. + * + * Counterpart to Solidity's `/` operator. Note: this function uses a + * `revert` opcode (which leaves remaining gas untouched) while Solidity + * uses an invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function div(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b > 0, errorMessage); + uint256 c = a / b; + // assert(a == b * c + a % b); // There is no case in which this doesn't hold + + return c; + } + + /** + * @dev Returns the remainder of dividing two unsigned integers. (unsigned integer modulo), + * Reverts when dividing by zero. + * + * Counterpart to Solidity's `%` operator. This function uses a `revert` + * opcode (which leaves remaining gas untouched) while Solidity uses an + * invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. + */ + function mod(uint256 a, uint256 b) internal pure returns (uint256) { + return mod(a, b, "SafeMath: modulo by zero"); + } + + /** + * @dev Returns the remainder of dividing two unsigned integers. (unsigned integer modulo), + * Reverts with custom message when dividing by zero. + * + * Counterpart to Solidity's `%` operator. This function uses a `revert` + * opcode (which leaves remaining gas untouched) while Solidity uses an + * invalid opcode to revert (consuming all remaining gas). + * + * Requirements: + * + * - The divisor cannot be zero. 
+ */ + function mod(uint256 a, uint256 b, string memory errorMessage) internal pure returns (uint256) { + require(b != 0, errorMessage); + return a % b; + } +} + +// File: @openzeppelin/contracts/utils/Address.sol + +pragma solidity ^0.6.2; + +/** + * @dev Collection of functions related to the address type + */ +library Address { + /** + * @dev Returns true if `account` is a contract. + * + * [IMPORTANT] + * ==== + * It is unsafe to assume that an address for which this function returns + * false is an externally-owned account (EOA) and not a contract. + * + * Among others, `isContract` will return false for the following + * types of addresses: + * + * - an externally-owned account + * - a contract in construction + * - an address where a contract will be created + * - an address where a contract lived, but was destroyed + * ==== + */ + function isContract(address account) internal view returns (bool) { + // According to EIP-1052, 0x0 is the value returned for not-yet created accounts + // and 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470 is returned + // for accounts without code, i.e. `keccak256('')` + bytes32 codehash; + bytes32 accountHash = 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470; + // solhint-disable-next-line no-inline-assembly + assembly { codehash := extcodehash(account) } + return (codehash != accountHash && codehash != 0x0); + } + + /** + * @dev Replacement for Solidity's `transfer`: sends `amount` wei to + * `recipient`, forwarding all available gas and reverting on errors. + * + * https://eips.ethereum.org/EIPS/eip-1884[EIP1884] increases the gas cost + * of certain opcodes, possibly making contracts go over the 2300 gas limit + * imposed by `transfer`, making them unable to receive funds via + * `transfer`. {sendValue} removes this limitation. + * + * https://diligence.consensys.net/posts/2019/09/stop-using-soliditys-transfer-now/[Learn more]. + * + * IMPORTANT: because control is transferred to `recipient`, care must be + * taken to not create reentrancy vulnerabilities. Consider using + * {ReentrancyGuard} or the + * https://solidity.readthedocs.io/en/v0.5.11/security-considerations.html#use-the-checks-effects-interactions-pattern[checks-effects-interactions pattern]. + */ + function sendValue(address payable recipient, uint256 amount) internal { + require(address(this).balance >= amount, "Address: insufficient balance"); + + // solhint-disable-next-line avoid-low-level-calls, avoid-call-value + (bool success, ) = recipient.call{ value: amount }(""); + require(success, "Address: unable to send value, recipient may have reverted"); + } + + /** + * @dev Performs a Solidity function call using a low level `call`. A + * plain`call` is an unsafe replacement for a function call: use this + * function instead. + * + * If `target` reverts with a revert reason, it is bubbled up by this + * function (like regular Solidity function calls). + * + * Returns the raw returned data. To convert to the expected return value, + * use https://solidity.readthedocs.io/en/latest/units-and-global-variables.html?highlight=abi.decode#abi-encoding-and-decoding-functions[`abi.decode`]. + * + * Requirements: + * + * - `target` must be a contract. + * - calling `target` with `data` must not revert. 
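+     *
+     * For illustration (placeholder names), with `using Address for address;`:
+     *
+     * ```
+     * bytes memory returndata = target.functionCall(
+     *     abi.encodeWithSignature("transfer(address,uint256)", to, amount)
+     * );
+     * ```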
+ * + * _Available since v3.1._ + */ + function functionCall(address target, bytes memory data) internal returns (bytes memory) { + return functionCall(target, data, "Address: low-level call failed"); + } + + /** + * @dev Same as {xref-Address-functionCall-address-bytes-}[`functionCall`], but with + * `errorMessage` as a fallback revert reason when `target` reverts. + * + * _Available since v3.1._ + */ + function functionCall(address target, bytes memory data, string memory errorMessage) internal returns (bytes memory) { + return _functionCallWithValue(target, data, 0, errorMessage); + } + + /** + * @dev Same as {xref-Address-functionCall-address-bytes-}[`functionCall`], + * but also transferring `value` wei to `target`. + * + * Requirements: + * + * - the calling contract must have an ETH balance of at least `value`. + * - the called Solidity function must be `payable`. + * + * _Available since v3.1._ + */ + function functionCallWithValue(address target, bytes memory data, uint256 value) internal returns (bytes memory) { + return functionCallWithValue(target, data, value, "Address: low-level call with value failed"); + } + + /** + * @dev Same as {xref-Address-functionCallWithValue-address-bytes-uint256-}[`functionCallWithValue`], but + * with `errorMessage` as a fallback revert reason when `target` reverts. + * + * _Available since v3.1._ + */ + function functionCallWithValue(address target, bytes memory data, uint256 value, string memory errorMessage) internal returns (bytes memory) { + require(address(this).balance >= value, "Address: insufficient balance for call"); + return _functionCallWithValue(target, data, value, errorMessage); + } + + function _functionCallWithValue(address target, bytes memory data, uint256 weiValue, string memory errorMessage) private returns (bytes memory) { + require(isContract(target), "Address: call to non-contract"); + + // solhint-disable-next-line avoid-low-level-calls + (bool success, bytes memory returndata) = target.call{ value: weiValue }(data); + if (success) { + return returndata; + } else { + // Look for revert reason and bubble it up if present + if (returndata.length > 0) { + // The easiest way to bubble the revert reason is using memory via assembly + + // solhint-disable-next-line no-inline-assembly + assembly { + let returndata_size := mload(returndata) + revert(add(32, returndata), returndata_size) + } + } else { + revert(errorMessage); + } + } + } +} + +// File: @openzeppelin/contracts/token/ERC20/ERC20.sol + +pragma solidity ^0.6.0; + + + + + +/** + * @dev Implementation of the {IERC20} interface. + * + * This implementation is agnostic to the way tokens are created. This means + * that a supply mechanism has to be added in a derived contract using {_mint}. + * For a generic mechanism see {ERC20PresetMinterPauser}. + * + * TIP: For a detailed writeup see our guide + * https://forum.zeppelin.solutions/t/how-to-implement-erc20-supply-mechanisms/226[How + * to implement supply mechanisms]. + * + * We have followed general OpenZeppelin guidelines: functions revert instead + * of returning `false` on failure. This behavior is nonetheless conventional + * and does not conflict with the expectations of ERC20 applications. + * + * Additionally, an {Approval} event is emitted on calls to {transferFrom}. + * This allows applications to reconstruct the allowance for all accounts just + * by listening to said events. Other implementations of the EIP may not emit + * these events, as it isn't required by the specification. 
+ * + * Finally, the non-standard {decreaseAllowance} and {increaseAllowance} + * functions have been added to mitigate the well-known issues around setting + * allowances. See {IERC20-approve}. + */ +contract ERC20 is Context, IERC20 { + using SafeMath for uint256; + using Address for address; + + mapping (address => uint256) private _balances; + + mapping (address => mapping (address => uint256)) private _allowances; + + uint256 private _totalSupply; + + string private _name; + string private _symbol; + uint8 private _decimals; + + /** + * @dev Sets the values for {name} and {symbol}, initializes {decimals} with + * a default value of 18. + * + * To select a different value for {decimals}, use {_setupDecimals}. + * + * All three of these values are immutable: they can only be set once during + * construction. + */ + constructor (string memory name, string memory symbol) public { + _name = name; + _symbol = symbol; + _decimals = 18; + } + + /** + * @dev Returns the name of the token. + */ + function name() public view returns (string memory) { + return _name; + } + + /** + * @dev Returns the symbol of the token, usually a shorter version of the + * name. + */ + function symbol() public view returns (string memory) { + return _symbol; + } + + /** + * @dev Returns the number of decimals used to get its user representation. + * For example, if `decimals` equals `2`, a balance of `505` tokens should + * be displayed to a user as `5,05` (`505 / 10 ** 2`). + * + * Tokens usually opt for a value of 18, imitating the relationship between + * Ether and Wei. This is the value {ERC20} uses, unless {_setupDecimals} is + * called. + * + * NOTE: This information is only used for _display_ purposes: it in + * no way affects any of the arithmetic of the contract, including + * {IERC20-balanceOf} and {IERC20-transfer}. + */ + function decimals() public view returns (uint8) { + return _decimals; + } + + /** + * @dev See {IERC20-totalSupply}. + */ + function totalSupply() public view override returns (uint256) { + return _totalSupply; + } + + /** + * @dev See {IERC20-balanceOf}. + */ + function balanceOf(address account) public view override returns (uint256) { + return _balances[account]; + } + + /** + * @dev See {IERC20-transfer}. + * + * Requirements: + * + * - `recipient` cannot be the zero address. + * - the caller must have a balance of at least `amount`. + */ + function transfer(address recipient, uint256 amount) public virtual override returns (bool) { + _transfer(_msgSender(), recipient, amount); + return true; + } + + /** + * @dev See {IERC20-allowance}. + */ + function allowance(address owner, address spender) public view virtual override returns (uint256) { + return _allowances[owner][spender]; + } + + /** + * @dev See {IERC20-approve}. + * + * Requirements: + * + * - `spender` cannot be the zero address. + */ + function approve(address spender, uint256 amount) public virtual override returns (bool) { + _approve(_msgSender(), spender, amount); + return true; + } + + /** + * @dev See {IERC20-transferFrom}. + * + * Emits an {Approval} event indicating the updated allowance. This is not + * required by the EIP. See the note at the beginning of {ERC20}; + * + * Requirements: + * - `sender` and `recipient` cannot be the zero address. + * - `sender` must have a balance of at least `amount`. + * - the caller must have allowance for ``sender``'s tokens of at least + * `amount`. 
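+     *
+     * For illustration (placeholder names), the usual allowance flow is:
+     *
+     * ```
+     * token.approve(spender, 100);        // called by the token owner
+     * token.transferFrom(owner, to, 40);  // called by `spender`; allowance left: 60
+     * ```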
+ */ + function transferFrom(address sender, address recipient, uint256 amount) public virtual override returns (bool) { + _transfer(sender, recipient, amount); + _approve(sender, _msgSender(), _allowances[sender][_msgSender()].sub(amount, "ERC20: transfer amount exceeds allowance")); + return true; + } + + /** + * @dev Atomically increases the allowance granted to `spender` by the caller. + * + * This is an alternative to {approve} that can be used as a mitigation for + * problems described in {IERC20-approve}. + * + * Emits an {Approval} event indicating the updated allowance. + * + * Requirements: + * + * - `spender` cannot be the zero address. + */ + function increaseAllowance(address spender, uint256 addedValue) public virtual returns (bool) { + _approve(_msgSender(), spender, _allowances[_msgSender()][spender].add(addedValue)); + return true; + } + + /** + * @dev Atomically decreases the allowance granted to `spender` by the caller. + * + * This is an alternative to {approve} that can be used as a mitigation for + * problems described in {IERC20-approve}. + * + * Emits an {Approval} event indicating the updated allowance. + * + * Requirements: + * + * - `spender` cannot be the zero address. + * - `spender` must have allowance for the caller of at least + * `subtractedValue`. + */ + function decreaseAllowance(address spender, uint256 subtractedValue) public virtual returns (bool) { + _approve(_msgSender(), spender, _allowances[_msgSender()][spender].sub(subtractedValue, "ERC20: decreased allowance below zero")); + return true; + } + + /** + * @dev Moves tokens `amount` from `sender` to `recipient`. + * + * This is internal function is equivalent to {transfer}, and can be used to + * e.g. implement automatic token fees, slashing mechanisms, etc. + * + * Emits a {Transfer} event. + * + * Requirements: + * + * - `sender` cannot be the zero address. + * - `recipient` cannot be the zero address. + * - `sender` must have a balance of at least `amount`. + */ + function _transfer(address sender, address recipient, uint256 amount) internal virtual { + require(sender != address(0), "ERC20: transfer from the zero address"); + require(recipient != address(0), "ERC20: transfer to the zero address"); + + _beforeTokenTransfer(sender, recipient, amount); + + _balances[sender] = _balances[sender].sub(amount, "ERC20: transfer amount exceeds balance"); + _balances[recipient] = _balances[recipient].add(amount); + emit Transfer(sender, recipient, amount); + } + + /** @dev Creates `amount` tokens and assigns them to `account`, increasing + * the total supply. + * + * Emits a {Transfer} event with `from` set to the zero address. + * + * Requirements + * + * - `to` cannot be the zero address. + */ + function _mint(address account, uint256 amount) internal virtual { + require(account != address(0), "ERC20: mint to the zero address"); + + _beforeTokenTransfer(address(0), account, amount); + + _totalSupply = _totalSupply.add(amount); + _balances[account] = _balances[account].add(amount); + emit Transfer(address(0), account, amount); + } + + /** + * @dev Destroys `amount` tokens from `account`, reducing the + * total supply. + * + * Emits a {Transfer} event with `to` set to the zero address. + * + * Requirements + * + * - `account` cannot be the zero address. + * - `account` must have at least `amount` tokens. 
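+     *
+     * For illustration, the child token's `withdraw` function further below burns
+     * from the caller with `_burn(_msgSender(), amount);`, and that burn is later
+     * verified when exiting back to the root chain.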
+ */ + function _burn(address account, uint256 amount) internal virtual { + require(account != address(0), "ERC20: burn from the zero address"); + + _beforeTokenTransfer(account, address(0), amount); + + _balances[account] = _balances[account].sub(amount, "ERC20: burn amount exceeds balance"); + _totalSupply = _totalSupply.sub(amount); + emit Transfer(account, address(0), amount); + } + + /** + * @dev Sets `amount` as the allowance of `spender` over the `owner`s tokens. + * + * This is internal function is equivalent to `approve`, and can be used to + * e.g. set automatic allowances for certain subsystems, etc. + * + * Emits an {Approval} event. + * + * Requirements: + * + * - `owner` cannot be the zero address. + * - `spender` cannot be the zero address. + */ + function _approve(address owner, address spender, uint256 amount) internal virtual { + require(owner != address(0), "ERC20: approve from the zero address"); + require(spender != address(0), "ERC20: approve to the zero address"); + + _allowances[owner][spender] = amount; + emit Approval(owner, spender, amount); + } + + /** + * @dev Sets {decimals} to a value other than the default one of 18. + * + * WARNING: This function should only be called from the constructor. Most + * applications that interact with token contracts will not expect + * {decimals} to ever change, and may work incorrectly if it does. + */ + function _setupDecimals(uint8 decimals_) internal { + _decimals = decimals_; + } + + /** + * @dev Hook that is called before any transfer of tokens. This includes + * minting and burning. + * + * Calling conditions: + * + * - when `from` and `to` are both non-zero, `amount` of ``from``'s tokens + * will be to transferred to `to`. + * - when `from` is zero, `amount` tokens will be minted for `to`. + * - when `to` is zero, `amount` of ``from``'s tokens will be burned. + * - `from` and `to` are never both zero. + * + * To learn more about hooks, head to xref:ROOT:extending-contracts.adoc#using-hooks[Using Hooks]. + */ + function _beforeTokenTransfer(address from, address to, uint256 amount) internal virtual { } +} + +// File: @openzeppelin/contracts/utils/EnumerableSet.sol + +pragma solidity ^0.6.0; + +/** + * @dev Library for managing + * https://en.wikipedia.org/wiki/Set_(abstract_data_type)[sets] of primitive + * types. + * + * Sets have the following properties: + * + * - Elements are added, removed, and checked for existence in constant time + * (O(1)). + * - Elements are enumerated in O(n). No guarantees are made on the ordering. + * + * ``` + * contract Example { + * // Add the library methods + * using EnumerableSet for EnumerableSet.AddressSet; + * + * // Declare a set state variable + * EnumerableSet.AddressSet private mySet; + * } + * ``` + * + * As of v3.0.0, only sets of type `address` (`AddressSet`) and `uint256` + * (`UintSet`) are supported. + */ +library EnumerableSet { + // To implement this library for multiple types with as little code + // repetition as possible, we write it in terms of a generic Set type with + // bytes32 values. + // The Set implementation uses private functions, and user-facing + // implementations (such as AddressSet) are just wrappers around the + // underlying Set. + // This means that we can only create new EnumerableSets for types that fit + // in bytes32. + + struct Set { + // Storage of set values + bytes32[] _values; + + // Position of the value in the `values` array, plus 1 because index 0 + // means a value is not in the set. 
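+        //
+        // For example, after _add(set, X) into an empty set, _values is [X] and
+        // _indexes[X] is 1; _contains only checks _indexes[value] != 0, and _remove
+        // swaps the target with the last stored value before popping ("swap and
+        // pop"), keeping both structures consistent.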
+ mapping (bytes32 => uint256) _indexes; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function _add(Set storage set, bytes32 value) private returns (bool) { + if (!_contains(set, value)) { + set._values.push(value); + // The value is stored at length-1, but we add 1 to all indexes + // and use 0 as a sentinel value + set._indexes[value] = set._values.length; + return true; + } else { + return false; + } + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. + */ + function _remove(Set storage set, bytes32 value) private returns (bool) { + // We read and store the value's index to prevent multiple reads from the same storage slot + uint256 valueIndex = set._indexes[value]; + + if (valueIndex != 0) { // Equivalent to contains(set, value) + // To delete an element from the _values array in O(1), we swap the element to delete with the last one in + // the array, and then remove the last element (sometimes called as 'swap and pop'). + // This modifies the order of the array, as noted in {at}. + + uint256 toDeleteIndex = valueIndex - 1; + uint256 lastIndex = set._values.length - 1; + + // When the value to delete is the last one, the swap operation is unnecessary. However, since this occurs + // so rarely, we still do the swap anyway to avoid the gas cost of adding an 'if' statement. + + bytes32 lastvalue = set._values[lastIndex]; + + // Move the last value to the index where the value to delete is + set._values[toDeleteIndex] = lastvalue; + // Update the index for the moved value + set._indexes[lastvalue] = toDeleteIndex + 1; // All indexes are 1-based + + // Delete the slot where the moved value was stored + set._values.pop(); + + // Delete the index for the deleted slot + delete set._indexes[value]; + + return true; + } else { + return false; + } + } + + /** + * @dev Returns true if the value is in the set. O(1). + */ + function _contains(Set storage set, bytes32 value) private view returns (bool) { + return set._indexes[value] != 0; + } + + /** + * @dev Returns the number of values on the set. O(1). + */ + function _length(Set storage set) private view returns (uint256) { + return set._values.length; + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function _at(Set storage set, uint256 index) private view returns (bytes32) { + require(set._values.length > index, "EnumerableSet: index out of bounds"); + return set._values[index]; + } + + // AddressSet + + struct AddressSet { + Set _inner; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function add(AddressSet storage set, address value) internal returns (bool) { + return _add(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. + */ + function remove(AddressSet storage set, address value) internal returns (bool) { + return _remove(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Returns true if the value is in the set. O(1). 
+ */ + function contains(AddressSet storage set, address value) internal view returns (bool) { + return _contains(set._inner, bytes32(uint256(value))); + } + + /** + * @dev Returns the number of values in the set. O(1). + */ + function length(AddressSet storage set) internal view returns (uint256) { + return _length(set._inner); + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function at(AddressSet storage set, uint256 index) internal view returns (address) { + return address(uint256(_at(set._inner, index))); + } + + + // UintSet + + struct UintSet { + Set _inner; + } + + /** + * @dev Add a value to a set. O(1). + * + * Returns true if the value was added to the set, that is if it was not + * already present. + */ + function add(UintSet storage set, uint256 value) internal returns (bool) { + return _add(set._inner, bytes32(value)); + } + + /** + * @dev Removes a value from a set. O(1). + * + * Returns true if the value was removed from the set, that is if it was + * present. + */ + function remove(UintSet storage set, uint256 value) internal returns (bool) { + return _remove(set._inner, bytes32(value)); + } + + /** + * @dev Returns true if the value is in the set. O(1). + */ + function contains(UintSet storage set, uint256 value) internal view returns (bool) { + return _contains(set._inner, bytes32(value)); + } + + /** + * @dev Returns the number of values on the set. O(1). + */ + function length(UintSet storage set) internal view returns (uint256) { + return _length(set._inner); + } + + /** + * @dev Returns the value stored at position `index` in the set. O(1). + * + * Note that there are no guarantees on the ordering of values inside the + * array, and it may change when more values are added or removed. + * + * Requirements: + * + * - `index` must be strictly less than {length}. + */ + function at(UintSet storage set, uint256 index) internal view returns (uint256) { + return uint256(_at(set._inner, index)); + } +} + +// File: @openzeppelin/contracts/access/AccessControl.sol + +pragma solidity ^0.6.0; + + + + +/** + * @dev Contract module that allows children to implement role-based access + * control mechanisms. + * + * Roles are referred to by their `bytes32` identifier. These should be exposed + * in the external API and be unique. The best way to achieve this is by + * using `public constant` hash digests: + * + * ``` + * bytes32 public constant MY_ROLE = keccak256("MY_ROLE"); + * ``` + * + * Roles can be used to represent a set of permissions. To restrict access to a + * function call, use {hasRole}: + * + * ``` + * function foo() public { + * require(hasRole(MY_ROLE, msg.sender)); + * ... + * } + * ``` + * + * Roles can be granted and revoked dynamically via the {grantRole} and + * {revokeRole} functions. Each role has an associated admin role, and only + * accounts that have a role's admin role can call {grantRole} and {revokeRole}. + * + * By default, the admin role for all roles is `DEFAULT_ADMIN_ROLE`, which means + * that only accounts with this role will be able to grant or revoke other + * roles. More complex role relationships can be created by using + * {_setRoleAdmin}. + * + * WARNING: The `DEFAULT_ADMIN_ROLE` is also its own admin: it has permission to + * grant and revoke this role. 
Extra precautions should be taken to secure + * accounts that have been granted it. + */ +abstract contract AccessControl is Context { + using EnumerableSet for EnumerableSet.AddressSet; + using Address for address; + + struct RoleData { + EnumerableSet.AddressSet members; + bytes32 adminRole; + } + + mapping (bytes32 => RoleData) private _roles; + + bytes32 public constant DEFAULT_ADMIN_ROLE = 0x00; + + /** + * @dev Emitted when `newAdminRole` is set as ``role``'s admin role, replacing `previousAdminRole` + * + * `DEFAULT_ADMIN_ROLE` is the starting admin for all roles, despite + * {RoleAdminChanged} not being emitted signaling this. + * + * _Available since v3.1._ + */ + event RoleAdminChanged(bytes32 indexed role, bytes32 indexed previousAdminRole, bytes32 indexed newAdminRole); + + /** + * @dev Emitted when `account` is granted `role`. + * + * `sender` is the account that originated the contract call, an admin role + * bearer except when using {_setupRole}. + */ + event RoleGranted(bytes32 indexed role, address indexed account, address indexed sender); + + /** + * @dev Emitted when `account` is revoked `role`. + * + * `sender` is the account that originated the contract call: + * - if using `revokeRole`, it is the admin role bearer + * - if using `renounceRole`, it is the role bearer (i.e. `account`) + */ + event RoleRevoked(bytes32 indexed role, address indexed account, address indexed sender); + + /** + * @dev Returns `true` if `account` has been granted `role`. + */ + function hasRole(bytes32 role, address account) public view returns (bool) { + return _roles[role].members.contains(account); + } + + /** + * @dev Returns the number of accounts that have `role`. Can be used + * together with {getRoleMember} to enumerate all bearers of a role. + */ + function getRoleMemberCount(bytes32 role) public view returns (uint256) { + return _roles[role].members.length(); + } + + /** + * @dev Returns one of the accounts that have `role`. `index` must be a + * value between 0 and {getRoleMemberCount}, non-inclusive. + * + * Role bearers are not sorted in any particular way, and their ordering may + * change at any point. + * + * WARNING: When using {getRoleMember} and {getRoleMemberCount}, make sure + * you perform all queries on the same block. See the following + * https://forum.openzeppelin.com/t/iterating-over-elements-on-enumerableset-in-openzeppelin-contracts/2296[forum post] + * for more information. + */ + function getRoleMember(bytes32 role, uint256 index) public view returns (address) { + return _roles[role].members.at(index); + } + + /** + * @dev Returns the admin role that controls `role`. See {grantRole} and + * {revokeRole}. + * + * To change a role's admin, use {_setRoleAdmin}. + */ + function getRoleAdmin(bytes32 role) public view returns (bytes32) { + return _roles[role].adminRole; + } + + /** + * @dev Grants `role` to `account`. + * + * If `account` had not been already granted `role`, emits a {RoleGranted} + * event. + * + * Requirements: + * + * - the caller must have ``role``'s admin role. + */ + function grantRole(bytes32 role, address account) public virtual { + require(hasRole(_roles[role].adminRole, _msgSender()), "AccessControl: sender must be an admin to grant"); + + _grantRole(role, account); + } + + /** + * @dev Revokes `role` from `account`. + * + * If `account` had been granted `role`, emits a {RoleRevoked} event. + * + * Requirements: + * + * - the caller must have ``role``'s admin role. 
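+     *
+     * For illustration (the manager address is a placeholder), an account holding
+     * `DEFAULT_ADMIN_ROLE` could revoke the depositor role that the token constructor
+     * grants further below:
+     *
+     * ```
+     * revokeRole(DEPOSITOR_ROLE, formerChildChainManager);
+     * ```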
+ */ + function revokeRole(bytes32 role, address account) public virtual { + require(hasRole(_roles[role].adminRole, _msgSender()), "AccessControl: sender must be an admin to revoke"); + + _revokeRole(role, account); + } + + /** + * @dev Revokes `role` from the calling account. + * + * Roles are often managed via {grantRole} and {revokeRole}: this function's + * purpose is to provide a mechanism for accounts to lose their privileges + * if they are compromised (such as when a trusted device is misplaced). + * + * If the calling account had been granted `role`, emits a {RoleRevoked} + * event. + * + * Requirements: + * + * - the caller must be `account`. + */ + function renounceRole(bytes32 role, address account) public virtual { + require(account == _msgSender(), "AccessControl: can only renounce roles for self"); + + _revokeRole(role, account); + } + + /** + * @dev Grants `role` to `account`. + * + * If `account` had not been already granted `role`, emits a {RoleGranted} + * event. Note that unlike {grantRole}, this function doesn't perform any + * checks on the calling account. + * + * [WARNING] + * ==== + * This function should only be called from the constructor when setting + * up the initial roles for the system. + * + * Using this function in any other way is effectively circumventing the admin + * system imposed by {AccessControl}. + * ==== + */ + function _setupRole(bytes32 role, address account) internal virtual { + _grantRole(role, account); + } + + /** + * @dev Sets `adminRole` as ``role``'s admin role. + * + * Emits a {RoleAdminChanged} event. + */ + function _setRoleAdmin(bytes32 role, bytes32 adminRole) internal virtual { + emit RoleAdminChanged(role, _roles[role].adminRole, adminRole); + _roles[role].adminRole = adminRole; + } + + function _grantRole(bytes32 role, address account) private { + if (_roles[role].members.add(account)) { + emit RoleGranted(role, account, _msgSender()); + } + } + + function _revokeRole(bytes32 role, address account) private { + if (_roles[role].members.remove(account)) { + emit RoleRevoked(role, account, _msgSender()); + } + } +} + +// File: contracts/common/AccessControlMixin.sol + +pragma solidity 0.6.6; + + +contract AccessControlMixin is AccessControl { + string private _revertMsg; + function _setupContractId(string memory contractId) internal { + _revertMsg = string(abi.encodePacked(contractId, ": INSUFFICIENT_PERMISSIONS")); + } + + modifier only(bytes32 role) { + require( + hasRole(role, _msgSender()), + _revertMsg + ); + _; + } +} + +// File: contracts/child/ChildToken/IChildToken.sol + +pragma solidity 0.6.6; + +interface IChildToken { + function deposit(address user, bytes calldata depositData) external; +} + +// File: contracts/common/Initializable.sol + +pragma solidity 0.6.6; + +contract Initializable { + bool inited = false; + + modifier initializer() { + require(!inited, "already inited"); + _; + inited = true; + } +} + +// File: contracts/common/EIP712Base.sol + +pragma solidity 0.6.6; + + +contract EIP712Base is Initializable { + struct EIP712Domain { + string name; + string version; + address verifyingContract; + bytes32 salt; + } + + string constant public ERC712_VERSION = "1"; + + bytes32 internal constant EIP712_DOMAIN_TYPEHASH = keccak256( + bytes( + "EIP712Domain(string name,string version,address verifyingContract,bytes32 salt)" + ) + ); + bytes32 internal domainSeperator; + + // supposed to be called once while initializing. 
+ // one of the contractsa that inherits this contract follows proxy pattern + // so it is not possible to do this in a constructor + function _initializeEIP712( + string memory name + ) + internal + initializer + { + _setDomainSeperator(name); + } + + function _setDomainSeperator(string memory name) internal { + domainSeperator = keccak256( + abi.encode( + EIP712_DOMAIN_TYPEHASH, + keccak256(bytes(name)), + keccak256(bytes(ERC712_VERSION)), + address(this), + bytes32(getChainId()) + ) + ); + } + + function getDomainSeperator() public view returns (bytes32) { + return domainSeperator; + } + + function getChainId() public pure returns (uint256) { + uint256 id; + assembly { + id := chainid() + } + return id; + } + + /** + * Accept message hash and returns hash message in EIP712 compatible form + * So that it can be used to recover signer from signature signed using EIP712 formatted data + * https://eips.ethereum.org/EIPS/eip-712 + * "\\x19" makes the encoding deterministic + * "\\x01" is the version byte to make it compatible to EIP-191 + */ + function toTypedMessageHash(bytes32 messageHash) + internal + view + returns (bytes32) + { + return + keccak256( + abi.encodePacked("\x19\x01", getDomainSeperator(), messageHash) + ); + } +} + +// File: contracts/common/NativeMetaTransaction.sol + +pragma solidity 0.6.6; + + + +contract NativeMetaTransaction is EIP712Base { + using SafeMath for uint256; + bytes32 private constant META_TRANSACTION_TYPEHASH = keccak256( + bytes( + "MetaTransaction(uint256 nonce,address from,bytes functionSignature)" + ) + ); + event MetaTransactionExecuted( + address userAddress, + address payable relayerAddress, + bytes functionSignature + ); + mapping(address => uint256) nonces; + + /* + * Meta transaction structure. + * No point of including value field here as if user is doing value transfer then he has the funds to pay for gas + * He should call the desired function directly in that case. 
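+     *
+     * For illustration, executeMetaTransaction below relays the signed call with
+     *
+     * ```
+     * address(this).call(abi.encodePacked(functionSignature, userAddress));
+     * ```
+     *
+     * so the signer's address travels in the last 20 bytes of calldata, where
+     * ContextMixin.msgSender() (further below) reads it back in place of msg.sender.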
+ */ + struct MetaTransaction { + uint256 nonce; + address from; + bytes functionSignature; + } + + function executeMetaTransaction( + address userAddress, + bytes memory functionSignature, + bytes32 sigR, + bytes32 sigS, + uint8 sigV + ) public payable returns (bytes memory) { + MetaTransaction memory metaTx = MetaTransaction({ + nonce: nonces[userAddress], + from: userAddress, + functionSignature: functionSignature + }); + + require( + verify(userAddress, metaTx, sigR, sigS, sigV), + "Signer and signature do not match" + ); + + // increase nonce for user (to avoid re-use) + nonces[userAddress] = nonces[userAddress].add(1); + + emit MetaTransactionExecuted( + userAddress, + msg.sender, + functionSignature + ); + + // Append userAddress and relayer address at the end to extract it from calling context + (bool success, bytes memory returnData) = address(this).call( + abi.encodePacked(functionSignature, userAddress) + ); + require(success, "Function call not successful"); + + return returnData; + } + + function hashMetaTransaction(MetaTransaction memory metaTx) + internal + pure + returns (bytes32) + { + return + keccak256( + abi.encode( + META_TRANSACTION_TYPEHASH, + metaTx.nonce, + metaTx.from, + keccak256(metaTx.functionSignature) + ) + ); + } + + function getNonce(address user) public view returns (uint256 nonce) { + nonce = nonces[user]; + } + + function verify( + address signer, + MetaTransaction memory metaTx, + bytes32 sigR, + bytes32 sigS, + uint8 sigV + ) internal view returns (bool) { + require(signer != address(0), "NativeMetaTransaction: INVALID_SIGNER"); + return + signer == + ecrecover( + toTypedMessageHash(hashMetaTransaction(metaTx)), + sigV, + sigR, + sigS + ); + } +} + +// File: contracts/common/ContextMixin.sol + +pragma solidity 0.6.6; + +abstract contract ContextMixin { + function msgSender() + internal + view + returns (address payable sender) + { + if (msg.sender == address(this)) { + bytes memory array = msg.data; + uint256 index = msg.data.length; + assembly { + // Load the 32 bytes word from memory with the address on the lower 20 bytes, and mask those. 
+ sender := and( + mload(add(array, index)), + 0xffffffffffffffffffffffffffffffffffffffff + ) + } + } else { + sender = msg.sender; + } + return sender; + } +} + +// File: contracts/child/ChildToken/ChildMintableERC20.sol + +pragma solidity 0.6.6; + + + + + + + +contract ChildMintableERC20 is + ERC20, + IChildToken, + AccessControlMixin, + NativeMetaTransaction, + ContextMixin +{ + bytes32 public constant DEPOSITOR_ROLE = keccak256("DEPOSITOR_ROLE"); + + constructor( + string memory name_, + string memory symbol_, + uint8 decimals_, + address childChainManager + ) public ERC20(name_, symbol_) { + _setupContractId("ChildMintableERC20"); + _setupDecimals(decimals_); + _setupRole(DEFAULT_ADMIN_ROLE, _msgSender()); + _setupRole(DEPOSITOR_ROLE, childChainManager); + _initializeEIP712(name_); + } + + // This is to support Native meta transactions + // never use msg.sender directly, use _msgSender() instead + function _msgSender() + internal + override + view + returns (address payable sender) + { + return ContextMixin.msgSender(); + } + + /** + * @notice called when token is deposited on root chain + * @dev Should be callable only by ChildChainManager + * Should handle deposit by minting the required amount for user + * Make sure minting is done only by this function + * @param user user address for whom deposit is being done + * @param depositData abi encoded amount + */ + function deposit(address user, bytes calldata depositData) + external + override + only(DEPOSITOR_ROLE) + { + uint256 amount = abi.decode(depositData, (uint256)); + _mint(user, amount); + } + + /** + * @notice called when user wants to withdraw tokens back to root chain + * @dev Should burn user's tokens. This transaction will be verified when exiting on root chain + * @param amount amount of tokens to withdraw + */ + function withdraw(uint256 amount) external { + _burn(_msgSender(), amount); + } + + /** + * @notice Example function to handle minting tokens on matic chain + * @dev Minting can be done as per requirement, + * This implementation allows only admin to mint tokens but it can be changed as per requirement + * @param user user for whom tokens are being minted + * @param amount amount of token to mint + */ + function mint(address user, uint256 amount) public only(DEFAULT_ADMIN_ROLE) { + _mint(user, amount); + } +} diff --git a/contracts/hardhat.config.js b/contracts/hardhat.config.js new file mode 100644 index 0000000000..b08c85db71 --- /dev/null +++ b/contracts/hardhat.config.js @@ -0,0 +1,11 @@ +const { etherscanApiKey } = require('./secrets.json'); +require("@nomiclabs/hardhat-etherscan"); + +module.exports = { + solidity: "0.6.12", + networks: { + }, + etherscan: { + apiKey: etherscanApiKey + } +}; diff --git a/contracts/migrations/2_deploy_child_erc20.js b/contracts/migrations/2_deploy_child_erc20.js new file mode 100644 index 0000000000..a976fc846c --- /dev/null +++ b/contracts/migrations/2_deploy_child_erc20.js @@ -0,0 +1,5 @@ +var ChildMintableERC20 = artifacts.require("ChildMintableERC20"); + +module.exports = function(deployer) { + deployer.deploy(ChildMintableERC20, 'BBGO', 'BBG', 18, '0xA6FA4fB5f76172d178d61B04b0ecd319C5d1C0aa'); +}; diff --git a/contracts/package-lock.json b/contracts/package-lock.json new file mode 100644 index 0000000000..34ec9674cd --- /dev/null +++ b/contracts/package-lock.json @@ -0,0 +1,32452 @@ +{ + "name": "bbgo-contracts", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "bbgo-contracts", + "version": "1.0.0", + "license": 
"MIT", + "dependencies": { + "@truffle/hdwallet-provider": "1.4" + }, + "devDependencies": { + "@nomiclabs/hardhat-ethers": "^2.0.2", + "@nomiclabs/hardhat-etherscan": "^2.1.1", + "@nomiclabs/hardhat-waffle": "^2.0.1", + "@openzeppelin/contracts": "^3.2.0", + "chai": "^4.3.4", + "ethereum-waffle": "^3.4.0", + "ethers": "^5.4.7", + "hardhat": "^2.6.5", + "prettier": "^2.5.1", + "solhint": "^3.3.6", + "truffle-plugin-verify": "^0.5.18" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "dependencies": { + "@babel/highlight": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.16.8.tgz", + "integrity": "sha512-m7OkX0IdKLKPpBlJtF561YJal5y/jyI5fNfWbPxh2D/nbzzGI4qRyrD8xO2jB24u7l+5I2a43scCG2IrfjC50Q==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.16.7.tgz", + "integrity": "sha512-aeLaqcqThRNZYmbMqtulsetOQZ/5gbR/dWruUCJcpas4Qoyy+QeagfDsPdMrqwsPRDNxJvBlRiZxxX7THO7qtA==", + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.16.7", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helpers": "^7.16.7", + "@babel/parser": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.1.2", + "semver": "^6.3.0", + "source-map": "^0.5.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.16.8.tgz", + "integrity": "sha512-1ojZwE9+lOXzcWdWmO6TbUzDfqLD39CmEhN8+2cX9XkDo5yW1OpgfejfliysR2AWLpMamTiOiAp/mtroaymhpw==", + "dependencies": { + "@babel/types": "^7.16.8", + "jsesc": "^2.5.1", + "source-map": "^0.5.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.16.7.tgz", + "integrity": "sha512-mGojBwIWcwGD6rfqgRXVlVYmPAv7eOpIemUG3dGnDdCY4Pae70ROij3XmfrH6Fa1h1aiDylpglbZyktfzyo/hA==", + "dependencies": { + "@babel/compat-data": "^7.16.4", + "@babel/helper-validator-option": "^7.16.7", + "browserslist": "^4.17.5", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz", + "integrity": "sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==", + "dependencies": { + "@babel/helper-compilation-targets": "^7.13.0", + "@babel/helper-module-imports": "^7.12.13", + "@babel/helper-plugin-utils": "^7.13.0", + "@babel/traverse": "^7.13.0", + "debug": "^4.1.1", + "lodash.debounce": "^4.0.8", + "resolve": 
"^1.14.2", + "semver": "^6.1.2" + }, + "peerDependencies": { + "@babel/core": "^7.4.0-0" + } + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz", + "integrity": "sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==", + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-function-name": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.16.7.tgz", + "integrity": "sha512-QfDfEnIUyyBSR3HtrtGECuZ6DAyCkYFp7GHl75vFtTnn6pjKeK0T1DB5lLkFvBea8MdaiUABx3osbgLyInoejA==", + "dependencies": { + "@babel/helper-get-function-arity": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-get-function-arity": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.7.tgz", + "integrity": "sha512-flc+RLSOBXzNzVhcLu6ujeHUrD6tANAOU5ojrRx/as+tbzf8+stUCj7+IfRRoAbEZqj/ahXEMsjhOhgeZsrnTw==", + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-hoist-variables": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz", + "integrity": "sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==", + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz", + "integrity": "sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==", + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.16.7.tgz", + "integrity": "sha512-gaqtLDxJEFCeQbYp9aLAefjhkKdjKcdh6DB7jniIGU3Pz52WAmP268zK0VgPz9hUNkMSYeH976K2/Y6yPadpng==", + "peer": true, + "dependencies": { + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-simple-access": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/helper-validator-identifier": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz", + "integrity": "sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.16.7.tgz", + "integrity": "sha512-ZIzHVyoeLMvXMN/vok/a4LWRy8G2v205mNP0XOuf9XRLyX5/u9CnVulUtDgUTama3lT+bf/UqucuZjqiGuTS1g==", + "peer": true, + "dependencies": { + 
"@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz", + "integrity": "sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==", + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz", + "integrity": "sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.16.7.tgz", + "integrity": "sha512-9ZDoqtfY7AuEOt3cxchfii6C7GDyyMBffktR5B2jvWv8u2+efwvpnVKXMWzNehqy68tKgAfSwfdw/lWpthS2bw==", + "peer": true, + "dependencies": { + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.7.tgz", + "integrity": "sha512-aKpPMfLvGO3Q97V0qhw/V2SWNWlwfJknuwAunU7wZLSfrM4xTBvg7E5opUVi1kJTBKihE38CPg4nBiqX83PWYw==", + "dependencies": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.16.8.tgz", + "integrity": "sha512-i7jDUfrVBWc+7OKcBzEe5n7fbv3i2fWtxKzzCvOjnzSxMfWMigAhtfJ7qzZNGFNMsCCd67+uz553dYKWXPvCKw==", + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-runtime": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.16.8.tgz", + "integrity": "sha512-6Kg2XHPFnIarNweZxmzbgYnnWsXxkx9WQUVk2sksBRL80lBC1RAQV3wQagWxdCHiYHqPN+oenwNIuttlYgIbQQ==", + "dependencies": { + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "babel-plugin-polyfill-corejs2": "^0.3.0", + "babel-plugin-polyfill-corejs3": "^0.5.0", + "babel-plugin-polyfill-regenerator": "^0.3.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz", + "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==", + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/template/-/template-7.16.7.tgz", + "integrity": "sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==", + "dependencies": { + "@babel/code-frame": "^7.16.7", + "@babel/parser": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.16.8.tgz", + "integrity": "sha512-xe+H7JlvKsDQwXRsBhSnq1/+9c+LlQcCK3Tn/l5sbx02HYns/cn7ibp9+RV1sIUqu7hKg91NWsgHurO9dowITQ==", + "dependencies": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.16.8", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-hoist-variables": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/parser": "^7.16.8", + "@babel/types": "^7.16.8", + "debug": "^4.1.0", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.16.8.tgz", + "integrity": "sha512-smN2DQc5s4M7fntyjGtyIPbRJv6wW4rU/94fmYJ7PKQuZkC0qGMHXJbg6sNGt12JmVr4k5YaptI/XtiLJBnmIg==", + "dependencies": { + "@babel/helper-validator-identifier": "^7.16.7", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@ensdomains/ens": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/@ensdomains/ens/-/ens-0.4.5.tgz", + "integrity": "sha512-JSvpj1iNMFjK6K+uVl4unqMoa9rf5jopb8cya5UGBWz23Nw8hSNT7efgUx4BTlAPAgpNlEioUfeTyQ6J9ZvTVw==", + "deprecated": "Please use @ensdomains/ens-contracts", + "dev": true, + "dependencies": { + "bluebird": "^3.5.2", + "eth-ens-namehash": "^2.0.8", + "solc": "^0.4.20", + "testrpc": "0.0.1", + "web3-utils": "^1.0.0-beta.31" + } + }, + "node_modules/@ensdomains/ens/node_modules/ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@ensdomains/ens/node_modules/camelcase": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-3.0.0.tgz", + "integrity": "sha1-MvxLn82vhF/N9+c7uXysImHwqwo=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@ensdomains/ens/node_modules/cliui": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", + "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=", + "dev": true, + "dependencies": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wrap-ansi": "^2.0.0" + } + }, + "node_modules/@ensdomains/ens/node_modules/fs-extra": { + "version": "0.30.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz", + "integrity": "sha1-8jP/zAjU2n1DLapEl3aYnbHfk/A=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": "^2.1.0", + "klaw": "^1.0.0", + "path-is-absolute": "^1.0.0", + "rimraf": "^2.2.8" + } + }, + "node_modules/@ensdomains/ens/node_modules/get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", + "dev": true + }, + "node_modules/@ensdomains/ens/node_modules/is-fullwidth-code-point": { + "version": "1.0.0", + 
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@ensdomains/ens/node_modules/jsonfile": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/@ensdomains/ens/node_modules/require-from-string": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-1.2.1.tgz", + "integrity": "sha1-UpyczvJzgK3+yaL5ZbZJu+5jZBg=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@ensdomains/ens/node_modules/require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=", + "dev": true + }, + "node_modules/@ensdomains/ens/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/@ensdomains/ens/node_modules/solc": { + "version": "0.4.26", + "resolved": "https://registry.npmjs.org/solc/-/solc-0.4.26.tgz", + "integrity": "sha512-o+c6FpkiHd+HPjmjEVpQgH7fqZ14tJpXhho+/bQXlXbliLIS/xjXb42Vxh+qQY1WCSTMQ0+a5vR9vi0MfhU6mA==", + "dev": true, + "dependencies": { + "fs-extra": "^0.30.0", + "memorystream": "^0.3.1", + "require-from-string": "^1.1.0", + "semver": "^5.3.0", + "yargs": "^4.7.1" + }, + "bin": { + "solcjs": "solcjs" + } + }, + "node_modules/@ensdomains/ens/node_modules/string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@ensdomains/ens/node_modules/strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@ensdomains/ens/node_modules/which-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-1.0.0.tgz", + "integrity": "sha1-u6Y8qGGUiZT/MHc2CJ47lgJsKk8=", + "dev": true + }, + "node_modules/@ensdomains/ens/node_modules/wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "dev": true, + "dependencies": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@ensdomains/ens/node_modules/y18n": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.2.tgz", + "integrity": "sha512-uGZHXkHnhF0XeeAPgnKfPv1bgKAYyVvmNL1xlKsPYZPaIHxGti2hHqvOCQv71XMsLxu1QjergkqogUnms5D3YQ==", + "dev": true + }, + "node_modules/@ensdomains/ens/node_modules/yargs": { + "version": 
"4.8.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-4.8.1.tgz", + "integrity": "sha1-wMQpJMpKqmsObaFznfshZDn53cA=", + "dev": true, + "dependencies": { + "cliui": "^3.2.0", + "decamelize": "^1.1.1", + "get-caller-file": "^1.0.1", + "lodash.assign": "^4.0.3", + "os-locale": "^1.4.0", + "read-pkg-up": "^1.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^1.0.1", + "set-blocking": "^2.0.0", + "string-width": "^1.0.1", + "which-module": "^1.0.0", + "window-size": "^0.2.0", + "y18n": "^3.2.1", + "yargs-parser": "^2.4.1" + } + }, + "node_modules/@ensdomains/ens/node_modules/yargs-parser": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-2.4.1.tgz", + "integrity": "sha1-hVaN488VD/SfpRgl8DqMiA3cxcQ=", + "dev": true, + "dependencies": { + "camelcase": "^3.0.0", + "lodash.assign": "^4.0.6" + } + }, + "node_modules/@ensdomains/resolver": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@ensdomains/resolver/-/resolver-0.2.4.tgz", + "integrity": "sha512-bvaTH34PMCbv6anRa9I/0zjLJgY4EuznbEMgbV77JBCQ9KNC46rzi0avuxpOfu+xDjPEtSFGqVEOr5GlUSGudA==", + "deprecated": "Please use @ensdomains/ens-contracts", + "dev": true + }, + "node_modules/@ethereum-waffle/chai": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/chai/-/chai-3.4.1.tgz", + "integrity": "sha512-8mjgjWCe8XSCWuyJgVtJY8sm00VTczGBTDxBejgEBWN/J9x7QD8jdmWW8bfxdnqZbxiDCTvRFL58Wmd254BEqQ==", + "dev": true, + "dependencies": { + "@ethereum-waffle/provider": "^3.4.0", + "ethers": "^5.4.7" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/@ethereum-waffle/compiler": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/compiler/-/compiler-3.4.0.tgz", + "integrity": "sha512-a2wxGOoB9F1QFRE+Om7Cz2wn+pxM/o7a0a6cbwhaS2lECJgFzeN9xEkVrKahRkF4gEfXGcuORg4msP0Asxezlw==", + "dev": true, + "dependencies": { + "@resolver-engine/imports": "^0.3.3", + "@resolver-engine/imports-fs": "^0.3.3", + "@typechain/ethers-v5": "^2.0.0", + "@types/mkdirp": "^0.5.2", + "@types/node-fetch": "^2.5.5", + "ethers": "^5.0.1", + "mkdirp": "^0.5.1", + "node-fetch": "^2.6.1", + "solc": "^0.6.3", + "ts-generator": "^0.1.1", + "typechain": "^3.0.0" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/@ethereum-waffle/ens": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/ens/-/ens-3.3.1.tgz", + "integrity": "sha512-xSjNWnT2Iwii3J3XGqD+F5yLEOzQzLHNLGfI5KIXdtQ4FHgReW/AMGRgPPLi+n+SP08oEQWJ3sEKrvbFlwJuaA==", + "dev": true, + "dependencies": { + "@ensdomains/ens": "^0.4.4", + "@ensdomains/resolver": "^0.2.4", + "ethers": "^5.5.2" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/@ethereum-waffle/mock-contract": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/mock-contract/-/mock-contract-3.3.1.tgz", + "integrity": "sha512-h9yChF7IkpJLODg/o9/jlwKwTcXJLSEIq3gewgwUJuBHnhPkJGekcZvsTbximYc+e42QUZrDUATSuTCIryeCEA==", + "dev": true, + "dependencies": { + "@ethersproject/abi": "^5.5.0", + "ethers": "^5.5.2" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/@ethereum-waffle/provider": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/provider/-/provider-3.4.1.tgz", + "integrity": "sha512-5iDte7c9g9N1rTRE/P4npwk1Hus/wA2yH850X6sP30mr1IrwSG9NKn6/2SOQkAVJnh9jqyLVg2X9xCODWL8G4A==", + "dev": true, + "dependencies": { + "@ethereum-waffle/ens": "^3.3.1", + "ethers": "^5.5.2", + "ganache-core": "^2.13.2", + 
"patch-package": "^6.2.2", + "postinstall-postinstall": "^2.1.0" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/@ethereumjs/block": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/block/-/block-3.6.0.tgz", + "integrity": "sha512-dqLo1LtsLG+Oelu5S5tWUDG0pah3QUwV5TJZy2cm19BXDr4ka/S9XBSgao0i09gTcuPlovlHgcs6d7EZ37urjQ==", + "dev": true, + "dependencies": { + "@ethereumjs/common": "^2.6.0", + "@ethereumjs/tx": "^3.4.0", + "ethereumjs-util": "^7.1.3", + "merkle-patricia-tree": "^4.2.2" + } + }, + "node_modules/@ethereumjs/block/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "node_modules/@ethereumjs/block/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@ethereumjs/block/node_modules/level-ws": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/level-ws/-/level-ws-2.0.0.tgz", + "integrity": "sha512-1iv7VXx0G9ec1isqQZ7y5LmoZo/ewAsyDHNA8EFDW5hqH2Kqovm33nSFkSdnLLAK+I5FlT+lo5Cw9itGe+CpQA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "readable-stream": "^3.1.0", + "xtend": "^4.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@ethereumjs/block/node_modules/merkle-patricia-tree": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/merkle-patricia-tree/-/merkle-patricia-tree-4.2.2.tgz", + "integrity": "sha512-eqZYNTshcYx9aESkSPr71EqwsR/QmpnObDEV4iLxkt/x/IoLYZYjJvKY72voP/27Vy61iMOrfOG6jrn7ttXD+Q==", + "dev": true, + "dependencies": { + "@types/levelup": "^4.3.0", + "ethereumjs-util": "^7.1.2", + "level-mem": "^5.0.1", + "level-ws": "^2.0.0", + "readable-stream": "^3.6.0", + "rlp": "^2.2.4", + "semaphore-async-await": "^1.5.1" + } + }, + "node_modules/@ethereumjs/block/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@ethereumjs/blockchain": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/@ethereumjs/blockchain/-/blockchain-5.5.1.tgz", + "integrity": "sha512-JS2jeKxl3tlaa5oXrZ8mGoVBCz6YqsGG350XVNtHAtNZXKk7pU3rH4xzF2ru42fksMMqzFLzKh9l4EQzmNWDqA==", + "dev": true, + "dependencies": { + "@ethereumjs/block": "^3.6.0", + "@ethereumjs/common": "^2.6.0", + "@ethereumjs/ethash": "^1.1.0", + "debug": "^2.2.0", + "ethereumjs-util": "^7.1.3", + "level-mem": "^5.0.1", + "lru-cache": "^5.1.1", + "semaphore-async-await": "^1.5.1" + } + }, + "node_modules/@ethereumjs/blockchain/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": 
"sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "node_modules/@ethereumjs/blockchain/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/@ethereumjs/blockchain/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@ethereumjs/blockchain/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "node_modules/@ethereumjs/common": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/common/-/common-2.6.0.tgz", + "integrity": "sha512-Cq2qS0FTu6O2VU1sgg+WyU9Ps0M6j/BEMHN+hRaECXCV/r0aI78u4N6p52QW/BDVhwWZpCdrvG8X7NJdzlpNUA==", + "dev": true, + "dependencies": { + "crc-32": "^1.2.0", + "ethereumjs-util": "^7.1.3" + } + }, + "node_modules/@ethereumjs/common/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "node_modules/@ethereumjs/common/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@ethereumjs/ethash": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/ethash/-/ethash-1.1.0.tgz", + "integrity": "sha512-/U7UOKW6BzpA+Vt+kISAoeDie1vAvY4Zy2KF5JJb+So7+1yKmJeJEHOGSnQIj330e9Zyl3L5Nae6VZyh2TJnAA==", + "dev": true, + "dependencies": { + "@ethereumjs/block": "^3.5.0", + "@types/levelup": "^4.3.0", + "buffer-xor": "^2.0.1", + "ethereumjs-util": "^7.1.1", + "miller-rabin": "^4.0.0" + } + }, + "node_modules/@ethereumjs/ethash/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "node_modules/@ethereumjs/ethash/node_modules/buffer-xor": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-2.0.2.tgz", + "integrity": "sha512-eHslX0bin3GB+Lx2p7lEYRShRewuNZL3fUl4qlVJGGiwoPGftmt8JQgk2Y9Ji5/01TnVDo33E5b5O3vUB1HdqQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.1" + } + }, + "node_modules/@ethereumjs/ethash/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": 
"https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@ethereumjs/tx": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/tx/-/tx-3.4.0.tgz", + "integrity": "sha512-WWUwg1PdjHKZZxPPo274ZuPsJCWV3SqATrEKQP1n2DrVYVP1aZIYpo/mFaA0BDoE0tIQmBeimRCEA0Lgil+yYw==", + "dev": true, + "dependencies": { + "@ethereumjs/common": "^2.6.0", + "ethereumjs-util": "^7.1.3" + } + }, + "node_modules/@ethereumjs/tx/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "node_modules/@ethereumjs/tx/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@ethereumjs/vm": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/vm/-/vm-5.6.0.tgz", + "integrity": "sha512-J2m/OgjjiGdWF2P9bj/4LnZQ1zRoZhY8mRNVw/N3tXliGI8ai1sI1mlDPkLpeUUM4vq54gH6n0ZlSpz8U/qlYQ==", + "dev": true, + "dependencies": { + "@ethereumjs/block": "^3.6.0", + "@ethereumjs/blockchain": "^5.5.0", + "@ethereumjs/common": "^2.6.0", + "@ethereumjs/tx": "^3.4.0", + "async-eventemitter": "^0.2.4", + "core-js-pure": "^3.0.1", + "debug": "^2.2.0", + "ethereumjs-util": "^7.1.3", + "functional-red-black-tree": "^1.0.1", + "mcl-wasm": "^0.7.1", + "merkle-patricia-tree": "^4.2.2", + "rustbn.js": "~0.2.0" + } + }, + "node_modules/@ethereumjs/vm/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "node_modules/@ethereumjs/vm/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/@ethereumjs/vm/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@ethereumjs/vm/node_modules/level-ws": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/level-ws/-/level-ws-2.0.0.tgz", + "integrity": 
"sha512-1iv7VXx0G9ec1isqQZ7y5LmoZo/ewAsyDHNA8EFDW5hqH2Kqovm33nSFkSdnLLAK+I5FlT+lo5Cw9itGe+CpQA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "readable-stream": "^3.1.0", + "xtend": "^4.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@ethereumjs/vm/node_modules/merkle-patricia-tree": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/merkle-patricia-tree/-/merkle-patricia-tree-4.2.2.tgz", + "integrity": "sha512-eqZYNTshcYx9aESkSPr71EqwsR/QmpnObDEV4iLxkt/x/IoLYZYjJvKY72voP/27Vy61iMOrfOG6jrn7ttXD+Q==", + "dev": true, + "dependencies": { + "@types/levelup": "^4.3.0", + "ethereumjs-util": "^7.1.2", + "level-mem": "^5.0.1", + "level-ws": "^2.0.0", + "readable-stream": "^3.6.0", + "rlp": "^2.2.4", + "semaphore-async-await": "^1.5.1" + } + }, + "node_modules/@ethereumjs/vm/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "node_modules/@ethereumjs/vm/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@ethersproject/abi": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/abi/-/abi-5.5.0.tgz", + "integrity": "sha512-loW7I4AohP5KycATvc0MgujU6JyCHPqHdeoo9z3Nr9xEiNioxa65ccdm1+fsoJhkuhdRtfcL8cfyGamz2AxZ5w==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/hash": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "node_modules/@ethersproject/abstract-provider": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/@ethersproject/abstract-provider/-/abstract-provider-5.5.1.tgz", + "integrity": "sha512-m+MA/ful6eKbxpr99xUYeRvLkfnlqzrF8SZ46d/xFB1A7ZVknYc/sXJG0RcufF52Qn2jeFj1hhcoQ7IXjNKUqg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/networks": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "@ethersproject/web": "^5.5.0" + } + }, + "node_modules/@ethersproject/abstract-signer": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/abstract-signer/-/abstract-signer-5.5.0.tgz", + "integrity": "sha512-lj//7r250MXVLKI7sVarXAbZXbv9P50lgmJQGr2/is82EwEb8r7HrxsmMqAjTsztMYy7ohrIhGMIml+Gx4D3mA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { 
+ "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abstract-provider": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0" + } + }, + "node_modules/@ethersproject/address": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/address/-/address-5.5.0.tgz", + "integrity": "sha512-l4Nj0eWlTUh6ro5IbPTgbpT4wRbdH5l8CQf7icF7sb/SI3Nhd9Y9HzhonTSTi6CefI0necIw7LJqQPopPLZyWw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/rlp": "^5.5.0" + } + }, + "node_modules/@ethersproject/base64": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/base64/-/base64-5.5.0.tgz", + "integrity": "sha512-tdayUKhU1ljrlHzEWbStXazDpsx4eg1dBXUSI6+mHlYklOXoXF6lZvw8tnD6oVaWfnMxAgRSKROg3cVKtCcppA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0" + } + }, + "node_modules/@ethersproject/basex": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/basex/-/basex-5.5.0.tgz", + "integrity": "sha512-ZIodwhHpVJ0Y3hUCfUucmxKsWQA5TMnavp5j/UOuDdzZWzJlRmuOjcTMIGgHCYuZmHt36BfiSyQPSRskPxbfaQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/properties": "^5.5.0" + } + }, + "node_modules/@ethersproject/bignumber": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/bignumber/-/bignumber-5.5.0.tgz", + "integrity": "sha512-6Xytlwvy6Rn3U3gKEc1vP7nR92frHkv6wtVr95LFR3jREXiCPzdWxKQ1cx4JGQBXxcguAwjA8murlYN2TSiEbg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "bn.js": "^4.11.9" + } + }, + "node_modules/@ethersproject/bytes": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/bytes/-/bytes-5.5.0.tgz", + "integrity": "sha512-ABvc7BHWhZU9PNM/tANm/Qx4ostPGadAuQzWTr3doklZOhDlmcBqclrQe/ZXUIj3K8wC28oYeuRa+A37tX9kog==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/logger": "^5.5.0" + } + }, + "node_modules/@ethersproject/constants": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/constants/-/constants-5.5.0.tgz", + "integrity": 
"sha512-2MsRRVChkvMWR+GyMGY4N1sAX9Mt3J9KykCsgUFd/1mwS0UH1qw+Bv9k1UJb3X3YJYFco9H20pjSlOIfCG5HYQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bignumber": "^5.5.0" + } + }, + "node_modules/@ethersproject/contracts": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/contracts/-/contracts-5.5.0.tgz", + "integrity": "sha512-2viY7NzyvJkh+Ug17v7g3/IJC8HqZBDcOjYARZLdzRxrfGlRgmYgl6xPRKVbEzy1dWKw/iv7chDcS83pg6cLxg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abi": "^5.5.0", + "@ethersproject/abstract-provider": "^5.5.0", + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/transactions": "^5.5.0" + } + }, + "node_modules/@ethersproject/hash": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/hash/-/hash-5.5.0.tgz", + "integrity": "sha512-dnGVpK1WtBjmnp3mUT0PlU2MpapnwWI0PibldQEq1408tQBAbZpPidkWoVVuNMOl/lISO3+4hXZWCL3YV7qzfg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "node_modules/@ethersproject/hdnode": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/hdnode/-/hdnode-5.5.0.tgz", + "integrity": "sha512-mcSOo9zeUg1L0CoJH7zmxwUG5ggQHU1UrRf8jyTYy6HxdZV+r0PBoL1bxr+JHIPXRzS6u/UW4mEn43y0tmyF8Q==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/basex": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/pbkdf2": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/sha2": "^5.5.0", + "@ethersproject/signing-key": "^5.5.0", + "@ethersproject/strings": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "@ethersproject/wordlists": "^5.5.0" + } + }, + "node_modules/@ethersproject/json-wallets": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/json-wallets/-/json-wallets-5.5.0.tgz", + "integrity": "sha512-9lA21XQnCdcS72xlBn1jfQdj2A1VUxZzOzi9UkNdnokNKke/9Ya2xA9aIK1SC3PQyBDLt4C+dfps7ULpkvKikQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + 
"type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/hdnode": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/pbkdf2": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/random": "^5.5.0", + "@ethersproject/strings": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "aes-js": "3.0.0", + "scrypt-js": "3.0.1" + } + }, + "node_modules/@ethersproject/json-wallets/node_modules/aes-js": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-3.0.0.tgz", + "integrity": "sha1-4h3xCtbCBTKVvLuNq0Cwnb6ofk0=", + "dev": true + }, + "node_modules/@ethersproject/keccak256": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/keccak256/-/keccak256-5.5.0.tgz", + "integrity": "sha512-5VoFCTjo2rYbBe1l2f4mccaRFN/4VQEYFwwn04aJV2h7qf4ZvI2wFxUE1XOX+snbwCLRzIeikOqtAoPwMza9kg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "js-sha3": "0.8.0" + } + }, + "node_modules/@ethersproject/logger": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/logger/-/logger-5.5.0.tgz", + "integrity": "sha512-rIY/6WPm7T8n3qS2vuHTUBPdXHl+rGxWxW5okDfo9J4Z0+gRRZT0msvUdIJkE4/HS29GUMziwGaaKO2bWONBrg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ] + }, + "node_modules/@ethersproject/networks": { + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/@ethersproject/networks/-/networks-5.5.2.tgz", + "integrity": "sha512-NEqPxbGBfy6O3x4ZTISb90SjEDkWYDUbEeIFhJly0F7sZjoQMnj5KYzMSkMkLKZ+1fGpx00EDpHQCy6PrDupkQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/logger": "^5.5.0" + } + }, + "node_modules/@ethersproject/pbkdf2": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/pbkdf2/-/pbkdf2-5.5.0.tgz", + "integrity": "sha512-SaDvQFvXPnz1QGpzr6/HToLifftSXGoXrbpZ6BvoZhmx4bNLHrxDe8MZisuecyOziP1aVEwzC2Hasj+86TgWVg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/sha2": "^5.5.0" + } + }, + "node_modules/@ethersproject/properties": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/properties/-/properties-5.5.0.tgz", + "integrity": "sha512-l3zRQg3JkD8EL3CPjNK5g7kMx4qSwiR60/uk5IVjd3oq1MZR5qUg40CNOoEJoX5wc3DyY5bt9EbMk86C7x0DNA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + 
"@ethersproject/logger": "^5.5.0" + } + }, + "node_modules/@ethersproject/providers": { + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/@ethersproject/providers/-/providers-5.5.2.tgz", + "integrity": "sha512-hkbx7x/MKcRjyrO4StKXCzCpWer6s97xnm34xkfPiarhtEUVAN4TBBpamM+z66WcTt7H5B53YwbRj1n7i8pZoQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abstract-provider": "^5.5.0", + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/basex": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/hash": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/networks": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/random": "^5.5.0", + "@ethersproject/rlp": "^5.5.0", + "@ethersproject/sha2": "^5.5.0", + "@ethersproject/strings": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "@ethersproject/web": "^5.5.0", + "bech32": "1.1.4", + "ws": "7.4.6" + } + }, + "node_modules/@ethersproject/providers/node_modules/ws": { + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", + "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==", + "dev": true, + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/@ethersproject/random": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/@ethersproject/random/-/random-5.5.1.tgz", + "integrity": "sha512-YaU2dQ7DuhL5Au7KbcQLHxcRHfgyNgvFV4sQOo0HrtW3Zkrc9ctWNz8wXQ4uCSfSDsqX2vcjhroxU5RQRV0nqA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0" + } + }, + "node_modules/@ethersproject/rlp": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/rlp/-/rlp-5.5.0.tgz", + "integrity": "sha512-hLv8XaQ8PTI9g2RHoQGf/WSxBfTB/NudRacbzdxmst5VHAqd1sMibWG7SENzT5Dj3yZ3kJYx+WiRYEcQTAkcYA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0" + } + }, + "node_modules/@ethersproject/sha2": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/sha2/-/sha2-5.5.0.tgz", + "integrity": "sha512-B5UBoglbCiHamRVPLA110J+2uqsifpZaTmid2/7W5rbtYVz6gus6/hSDieIU/6gaKIDcOj12WnOdiymEUHIAOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "hash.js": "1.1.7" + } + }, + 
"node_modules/@ethersproject/signing-key": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/signing-key/-/signing-key-5.5.0.tgz", + "integrity": "sha512-5VmseH7qjtNmDdZBswavhotYbWB0bOwKIlOTSlX14rKn5c11QmJwGt4GHeo7NrL/Ycl7uo9AHvEqs5xZgFBTng==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "bn.js": "^4.11.9", + "elliptic": "6.5.4", + "hash.js": "1.1.7" + } + }, + "node_modules/@ethersproject/solidity": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/solidity/-/solidity-5.5.0.tgz", + "integrity": "sha512-9NgZs9LhGMj6aCtHXhtmFQ4AN4sth5HuFXVvAQtzmm0jpSCNOTGtrHZJAeYTh7MBjRR8brylWZxBZR9zDStXbw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/sha2": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "node_modules/@ethersproject/strings": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/strings/-/strings-5.5.0.tgz", + "integrity": "sha512-9fy3TtF5LrX/wTrBaT8FGE6TDJyVjOvXynXJz5MT5azq+E6D92zuKNx7i29sWW2FjVOaWjAsiZ1ZWznuduTIIQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/logger": "^5.5.0" + } + }, + "node_modules/@ethersproject/transactions": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/transactions/-/transactions-5.5.0.tgz", + "integrity": "sha512-9RZYSKX26KfzEd/1eqvv8pLauCKzDTub0Ko4LfIgaERvRuwyaNV78mJs7cpIgZaDl6RJui4o49lHwwCM0526zA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/rlp": "^5.5.0", + "@ethersproject/signing-key": "^5.5.0" + } + }, + "node_modules/@ethersproject/units": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/units/-/units-5.5.0.tgz", + "integrity": "sha512-7+DpjiZk4v6wrikj+TCyWWa9dXLNU73tSTa7n0TSJDxkYbV3Yf1eRh9ToMLlZtuctNYu9RDNNy2USq3AdqSbag==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + 
"@ethersproject/logger": "^5.5.0" + } + }, + "node_modules/@ethersproject/wallet": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/wallet/-/wallet-5.5.0.tgz", + "integrity": "sha512-Mlu13hIctSYaZmUOo7r2PhNSd8eaMPVXe1wxrz4w4FCE4tDYBywDH+bAR1Xz2ADyXGwqYMwstzTrtUVIsKDO0Q==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abstract-provider": "^5.5.0", + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/hash": "^5.5.0", + "@ethersproject/hdnode": "^5.5.0", + "@ethersproject/json-wallets": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/random": "^5.5.0", + "@ethersproject/signing-key": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "@ethersproject/wordlists": "^5.5.0" + } + }, + "node_modules/@ethersproject/web": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/@ethersproject/web/-/web-5.5.1.tgz", + "integrity": "sha512-olvLvc1CB12sREc1ROPSHTdFCdvMh0J5GSJYiQg2D0hdD4QmJDy8QYDb1CvoqD/bF1c++aeKv2sR5uduuG9dQg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/base64": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "node_modules/@ethersproject/wordlists": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/wordlists/-/wordlists-5.5.0.tgz", + "integrity": "sha512-bL0UTReWDiaQJJYOC9sh/XcRu/9i2jMrzf8VLRmPKx58ckSlOJiohODkECCO50dtLZHcGU6MLXQ4OOrgBwP77Q==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/hash": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "node_modules/@nomiclabs/hardhat-ethers": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@nomiclabs/hardhat-ethers/-/hardhat-ethers-2.0.4.tgz", + "integrity": "sha512-7LMR344TkdCYkMVF9LuC9VU2NBIi84akQiwqm7OufpWaDgHbWhuanY53rk3SVAW0E4HBk5xn5wl5+bN5f+Mq5w==", + "dev": true, + "peerDependencies": { + "ethers": "^5.0.0", + "hardhat": "^2.0.0" + } + }, + "node_modules/@nomiclabs/hardhat-etherscan": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/@nomiclabs/hardhat-etherscan/-/hardhat-etherscan-2.1.8.tgz", + "integrity": "sha512-0+rj0SsZotVOcTLyDOxnOc3Gulo8upo0rsw/h+gBPcmtj91YqYJNhdARHoBxOhhE8z+5IUQPx+Dii04lXT14PA==", + "dev": true, + "dependencies": { + "@ethersproject/abi": "^5.1.2", + "@ethersproject/address": "^5.0.2", + "cbor": "^5.0.2", + "debug": "^4.1.1", + "fs-extra": "^7.0.1", + "node-fetch": "^2.6.0", + "semver": "^6.3.0" + }, + "peerDependencies": { + "hardhat": "^2.0.4" + } + }, + "node_modules/@nomiclabs/hardhat-waffle": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/@nomiclabs/hardhat-waffle/-/hardhat-waffle-2.0.1.tgz", + "integrity": "sha512-2YR2V5zTiztSH9n8BYWgtv3Q+EL0N5Ltm1PAr5z20uAY4SkkfylJ98CIqt18XFvxTD5x4K2wKBzddjV9ViDAZQ==", + "dev": true, + "dependencies": { + "@types/sinon-chai": "^3.2.3", + "@types/web3": "1.0.19" + }, + "peerDependencies": { + "@nomiclabs/hardhat-ethers": "^2.0.0", + "ethereum-waffle": "^3.2.0", + "ethers": "^5.0.0", + "hardhat": "^2.0.0" + } + }, + "node_modules/@openzeppelin/contracts": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/@openzeppelin/contracts/-/contracts-3.4.2.tgz", + "integrity": "sha512-z0zMCjyhhp4y7XKAcDAi3Vgms4T2PstwBdahiO0+9NaGICQKjynK3wduSRplTgk4LXmoO1yfDGO5RbjKYxtuxA==", + "dev": true + }, + "node_modules/@resolver-engine/core": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@resolver-engine/core/-/core-0.3.3.tgz", + "integrity": "sha512-eB8nEbKDJJBi5p5SrvrvILn4a0h42bKtbCTri3ZxCGt6UvoQyp7HnGOfki944bUjBSHKK3RvgfViHn+kqdXtnQ==", + "dev": true, + "dependencies": { + "debug": "^3.1.0", + "is-url": "^1.2.4", + "request": "^2.85.0" + } + }, + "node_modules/@resolver-engine/core/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@resolver-engine/fs": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@resolver-engine/fs/-/fs-0.3.3.tgz", + "integrity": "sha512-wQ9RhPUcny02Wm0IuJwYMyAG8fXVeKdmhm8xizNByD4ryZlx6PP6kRen+t/haF43cMfmaV7T3Cx6ChOdHEhFUQ==", + "dev": true, + "dependencies": { + "@resolver-engine/core": "^0.3.3", + "debug": "^3.1.0" + } + }, + "node_modules/@resolver-engine/fs/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@resolver-engine/imports": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@resolver-engine/imports/-/imports-0.3.3.tgz", + "integrity": "sha512-anHpS4wN4sRMwsAbMXhMfOD/y4a4Oo0Cw/5+rue7hSwGWsDOQaAU1ClK1OxjUC35/peazxEl8JaSRRS+Xb8t3Q==", + "dev": true, + "dependencies": { + "@resolver-engine/core": "^0.3.3", + "debug": "^3.1.0", + "hosted-git-info": "^2.6.0", + "path-browserify": "^1.0.0", + "url": "^0.11.0" + } + }, + "node_modules/@resolver-engine/imports-fs": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@resolver-engine/imports-fs/-/imports-fs-0.3.3.tgz", + "integrity": "sha512-7Pjg/ZAZtxpeyCFlZR5zqYkz+Wdo84ugB5LApwriT8XFeQoLwGUj4tZFFvvCuxaNCcqZzCYbonJgmGObYBzyCA==", + "dev": true, + "dependencies": { + "@resolver-engine/fs": "^0.3.3", + "@resolver-engine/imports": "^0.3.3", + "debug": "^3.1.0" + } + }, + "node_modules/@resolver-engine/imports-fs/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@resolver-engine/imports/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": 
"sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@sentry/core": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-5.30.0.tgz", + "integrity": "sha512-TmfrII8w1PQZSZgPpUESqjB+jC6MvZJZdLtE/0hZ+SrnKhW3x5WlYLvTXZpcWePYBku7rl2wn1RZu6uT0qCTeg==", + "dev": true, + "dependencies": { + "@sentry/hub": "5.30.0", + "@sentry/minimal": "5.30.0", + "@sentry/types": "5.30.0", + "@sentry/utils": "5.30.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@sentry/hub": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/hub/-/hub-5.30.0.tgz", + "integrity": "sha512-2tYrGnzb1gKz2EkMDQcfLrDTvmGcQPuWxLnJKXJvYTQDGLlEvi2tWz1VIHjunmOvJrB5aIQLhm+dcMRwFZDCqQ==", + "dev": true, + "dependencies": { + "@sentry/types": "5.30.0", + "@sentry/utils": "5.30.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@sentry/minimal": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/minimal/-/minimal-5.30.0.tgz", + "integrity": "sha512-BwWb/owZKtkDX+Sc4zCSTNcvZUq7YcH3uAVlmh/gtR9rmUvbzAA3ewLuB3myi4wWRAMEtny6+J/FN/x+2wn9Xw==", + "dev": true, + "dependencies": { + "@sentry/hub": "5.30.0", + "@sentry/types": "5.30.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@sentry/node": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/node/-/node-5.30.0.tgz", + "integrity": "sha512-Br5oyVBF0fZo6ZS9bxbJZG4ApAjRqAnqFFurMVJJdunNb80brh7a5Qva2kjhm+U6r9NJAB5OmDyPkA1Qnt+QVg==", + "dev": true, + "dependencies": { + "@sentry/core": "5.30.0", + "@sentry/hub": "5.30.0", + "@sentry/tracing": "5.30.0", + "@sentry/types": "5.30.0", + "@sentry/utils": "5.30.0", + "cookie": "^0.4.1", + "https-proxy-agent": "^5.0.0", + "lru_map": "^0.3.3", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@sentry/tracing": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/tracing/-/tracing-5.30.0.tgz", + "integrity": "sha512-dUFowCr0AIMwiLD7Fs314Mdzcug+gBVo/+NCMyDw8tFxJkwWAKl7Qa2OZxLQ0ZHjakcj1hNKfCQJ9rhyfOl4Aw==", + "dev": true, + "dependencies": { + "@sentry/hub": "5.30.0", + "@sentry/minimal": "5.30.0", + "@sentry/types": "5.30.0", + "@sentry/utils": "5.30.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@sentry/types": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-5.30.0.tgz", + "integrity": "sha512-R8xOqlSTZ+htqrfteCWU5Nk0CDN5ApUTvrlvBuiH1DyP6czDZ4ktbZB0hAgBlVcK0U+qpD3ag3Tqqpa5Q67rPw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/@sentry/utils": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-5.30.0.tgz", + "integrity": "sha512-zaYmoH0NWWtvnJjC9/CBseXMtKHm/tm40sz3YfJRxeQjyzRqNQPgivpd9R/oDJCYj999mzdW382p/qi2ypjLww==", + "dev": true, + "dependencies": { + "@sentry/types": "5.30.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@sinonjs/commons": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", + "integrity": "sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "7.1.2", + "resolved": 
"https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-7.1.2.tgz", + "integrity": "sha512-iQADsW4LBMISqZ6Ci1dupJL9pprqwcVFTcOsEmQOEhW+KLCVn/Y4Jrvg2k19fIHCp+iFprriYPTdRcQR8NbUPg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0" + } + }, + "node_modules/@solidity-parser/parser": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@solidity-parser/parser/-/parser-0.14.0.tgz", + "integrity": "sha512-cX0JJRcmPtNUJpzD2K7FdA7qQsTOk1UZnFx2k7qAg9ZRvuaH5NBe5IEdBMXGlmf2+FmjhqbygJ26H8l2SV7aKQ==", + "dev": true, + "dependencies": { + "antlr4ts": "^0.5.0-alpha.4" + } + }, + "node_modules/@truffle/hdwallet-provider": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@truffle/hdwallet-provider/-/hdwallet-provider-1.4.3.tgz", + "integrity": "sha512-Oo8ORAQLfcbLYp6HwG1mpOx6IpVkHv8IkKy25LZUN5Q5bCCqxdlMF0F7CnSXPBdQ+UqZY9+RthC0VrXv9gXiPQ==", + "dependencies": { + "@trufflesuite/web3-provider-engine": "15.0.13-1", + "ethereum-cryptography": "^0.1.3", + "ethereum-protocol": "^1.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.2", + "ethereumjs-util": "^6.1.0", + "ethereumjs-wallet": "^1.0.1" + } + }, + "node_modules/@trufflesuite/eth-json-rpc-filters": { + "version": "4.1.2-1", + "resolved": "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-filters/-/eth-json-rpc-filters-4.1.2-1.tgz", + "integrity": "sha512-/MChvC5dw2ck9NU1cZmdovCz2VKbOeIyR4tcxDvA5sT+NaL0rA2/R5U0yI7zsbo1zD+pgqav77rQHTzpUdDNJQ==", + "dependencies": { + "@trufflesuite/eth-json-rpc-middleware": "^4.4.2-0", + "await-semaphore": "^0.1.3", + "eth-query": "^2.1.2", + "json-rpc-engine": "^5.1.3", + "lodash.flatmap": "^4.5.0", + "safe-event-emitter": "^1.0.1" + } + }, + "node_modules/@trufflesuite/eth-json-rpc-infura": { + "version": "4.0.3-0", + "resolved": "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-infura/-/eth-json-rpc-infura-4.0.3-0.tgz", + "integrity": "sha512-xaUanOmo0YLqRsL0SfXpFienhdw5bpQ1WEXxMTRi57az4lwpZBv4tFUDvcerdwJrxX9wQqNmgUgd1BrR01dumw==", + "dependencies": { + "@trufflesuite/eth-json-rpc-middleware": "^4.4.2-1", + "cross-fetch": "^2.1.1", + "eth-json-rpc-errors": "^1.0.1", + "json-rpc-engine": "^5.1.3" + } + }, + "node_modules/@trufflesuite/eth-json-rpc-infura/node_modules/eth-json-rpc-errors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/eth-json-rpc-errors/-/eth-json-rpc-errors-1.1.1.tgz", + "integrity": "sha512-WT5shJ5KfNqHi9jOZD+ID8I1kuYWNrigtZat7GOQkvwo99f8SzAVaEcWhJUv656WiZOAg3P1RiJQANtUmDmbIg==", + "deprecated": "Package renamed: https://www.npmjs.com/package/eth-rpc-errors", + "dependencies": { + "fast-safe-stringify": "^2.0.6" + } + }, + "node_modules/@trufflesuite/eth-json-rpc-middleware": { + "version": "4.4.2-1", + "resolved": "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-middleware/-/eth-json-rpc-middleware-4.4.2-1.tgz", + "integrity": "sha512-iEy9H8ja7/8aYES5HfrepGBKU9n/Y4OabBJEklVd/zIBlhCCBAWBqkIZgXt11nBXO/rYAeKwYuE3puH3ByYnLA==", + "dependencies": { + "@trufflesuite/eth-sig-util": "^1.4.2", + "btoa": "^1.2.1", + "clone": "^2.1.1", + "eth-json-rpc-errors": "^1.0.1", + "eth-query": "^2.1.2", + "ethereumjs-block": "^1.6.0", + "ethereumjs-tx": "^1.3.7", + "ethereumjs-util": "^5.1.2", + "ethereumjs-vm": "^2.6.0", + "fetch-ponyfill": "^4.0.0", + "json-rpc-engine": "^5.1.3", + "json-stable-stringify": "^1.0.1", + "pify": "^3.0.0", + "safe-event-emitter": "^1.0.1" + } + }, + "node_modules/@trufflesuite/eth-json-rpc-middleware/node_modules/eth-json-rpc-errors": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/eth-json-rpc-errors/-/eth-json-rpc-errors-1.1.1.tgz", + "integrity": "sha512-WT5shJ5KfNqHi9jOZD+ID8I1kuYWNrigtZat7GOQkvwo99f8SzAVaEcWhJUv656WiZOAg3P1RiJQANtUmDmbIg==", + "deprecated": "Package renamed: https://www.npmjs.com/package/eth-rpc-errors", + "dependencies": { + "fast-safe-stringify": "^2.0.6" + } + }, + "node_modules/@trufflesuite/eth-json-rpc-middleware/node_modules/ethereum-common": { + "version": "0.0.18", + "resolved": "https://registry.npmjs.org/ethereum-common/-/ethereum-common-0.0.18.tgz", + "integrity": "sha1-L9w1dvIykDNYl26znaeDIT/5Uj8=" + }, + "node_modules/@trufflesuite/eth-json-rpc-middleware/node_modules/ethereumjs-tx": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-1.3.7.tgz", + "integrity": "sha512-wvLMxzt1RPhAQ9Yi3/HKZTn0FZYpnsmQdbKYfUUpi4j1SEIcbkd9tndVjcPrufY3V7j2IebOpC00Zp2P/Ay2kA==", + "deprecated": "New package name format for new versions: @ethereumjs/tx. Please update.", + "dependencies": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "node_modules/@trufflesuite/eth-json-rpc-middleware/node_modules/ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/@trufflesuite/eth-sig-util": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@trufflesuite/eth-sig-util/-/eth-sig-util-1.4.2.tgz", + "integrity": "sha512-+GyfN6b0LNW77hbQlH3ufZ/1eCON7mMrGym6tdYf7xiNw9Vv3jBO72bmmos1EId2NgBvPMhmYYm6DSLQFTmzrA==", + "dependencies": { + "ethereumjs-abi": "^0.6.8", + "ethereumjs-util": "^5.1.1" + } + }, + "node_modules/@trufflesuite/eth-sig-util/node_modules/ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/@trufflesuite/web3-provider-engine": { + "version": "15.0.13-1", + "resolved": "https://registry.npmjs.org/@trufflesuite/web3-provider-engine/-/web3-provider-engine-15.0.13-1.tgz", + "integrity": "sha512-6u3x/iIN5fyj8pib5QTUDmIOUiwAGhaqdSTXdqCu6v9zo2BEwdCqgEJd1uXDh3DBmPRDfiZ/ge8oUPy7LerpHg==", + "dependencies": { + "@trufflesuite/eth-json-rpc-filters": "^4.1.2-1", + "@trufflesuite/eth-json-rpc-infura": "^4.0.3-0", + "@trufflesuite/eth-json-rpc-middleware": "^4.4.2-1", + "@trufflesuite/eth-sig-util": "^1.4.2", + "async": "^2.5.0", + "backoff": "^2.5.0", + "clone": "^2.0.0", + "cross-fetch": "^2.1.0", + "eth-block-tracker": "^4.4.2", + "eth-json-rpc-errors": "^2.0.2", + "ethereumjs-block": "^1.2.2", + "ethereumjs-tx": "^1.2.0", + "ethereumjs-util": "^5.1.5", + "ethereumjs-vm": "^2.3.4", + "json-stable-stringify": "^1.0.1", + "promise-to-callback": "^1.0.0", + "readable-stream": "^2.2.9", + "request": "^2.85.0", + "semaphore": "^1.0.3", + "ws": "^5.1.1", + "xhr": "^2.2.0", + "xtend": "^4.0.1" + } + }, + 
"node_modules/@trufflesuite/web3-provider-engine/node_modules/ethereum-common": { + "version": "0.0.18", + "resolved": "https://registry.npmjs.org/ethereum-common/-/ethereum-common-0.0.18.tgz", + "integrity": "sha1-L9w1dvIykDNYl26znaeDIT/5Uj8=" + }, + "node_modules/@trufflesuite/web3-provider-engine/node_modules/ethereumjs-tx": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-1.3.7.tgz", + "integrity": "sha512-wvLMxzt1RPhAQ9Yi3/HKZTn0FZYpnsmQdbKYfUUpi4j1SEIcbkd9tndVjcPrufY3V7j2IebOpC00Zp2P/Ay2kA==", + "deprecated": "New package name format for new versions: @ethereumjs/tx. Please update.", + "dependencies": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "node_modules/@trufflesuite/web3-provider-engine/node_modules/ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/@typechain/ethers-v5": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@typechain/ethers-v5/-/ethers-v5-2.0.0.tgz", + "integrity": "sha512-0xdCkyGOzdqh4h5JSf+zoWx85IusEjDcPIwNEHP8mrWSnCae4rvrqB+/gtpdNfX7zjlFlZiMeePn2r63EI3Lrw==", + "dev": true, + "dependencies": { + "ethers": "^5.0.2" + }, + "peerDependencies": { + "ethers": "^5.0.0", + "typechain": "^3.0.0" + } + }, + "node_modules/@types/abstract-leveldown": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/abstract-leveldown/-/abstract-leveldown-7.2.0.tgz", + "integrity": "sha512-q5veSX6zjUy/DlDhR4Y4cU0k2Ar+DT2LUraP00T19WLmTO6Se1djepCCaqU6nQrwcJ5Hyo/CWqxTzrrFg8eqbQ==", + "dev": true + }, + "node_modules/@types/bn.js": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-5.1.0.tgz", + "integrity": "sha512-QSSVYj7pYFN49kW77o2s9xTCwZ8F2xLbjLLSEVh8D2F4JUhZtPAGOFLTD+ffqksBx/u4cE/KImFjyhqCjn/LIA==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/chai": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.0.tgz", + "integrity": "sha512-/ceqdqeRraGolFTcfoXNiqjyQhZzbINDngeoAq9GoHa8PPK1yNzTaxWjA6BFWp5Ua9JpXEMSS4s5i9tS0hOJtw==", + "dev": true + }, + "node_modules/@types/level-errors": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/level-errors/-/level-errors-3.0.0.tgz", + "integrity": "sha512-/lMtoq/Cf/2DVOm6zE6ORyOM+3ZVm/BvzEZVxUhf6bgh8ZHglXlBqxbxSlJeVp8FCbD3IVvk/VbsaNmDjrQvqQ==", + "dev": true + }, + "node_modules/@types/levelup": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/@types/levelup/-/levelup-4.3.3.tgz", + "integrity": "sha512-K+OTIjJcZHVlZQN1HmU64VtrC0jC3dXWQozuEIR9zVvltIk90zaGPM2AgT+fIkChpzHhFE3YnvFLCbLtzAmexA==", + "dev": true, + "dependencies": { + "@types/abstract-leveldown": "*", + "@types/level-errors": "*", + "@types/node": "*" + } + }, + "node_modules/@types/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@types/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw==", + "dev": true + }, + "node_modules/@types/mkdirp": { + "version": "0.5.2", + "resolved": 
"https://registry.npmjs.org/@types/mkdirp/-/mkdirp-0.5.2.tgz", + "integrity": "sha512-U5icWpv7YnZYGsN4/cmh3WD2onMY0aJIiTE6+51TwJCttdHvtCYmkBNOobHlXwrJRL0nkH9jH4kD+1FAdMN4Tg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "17.0.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.10.tgz", + "integrity": "sha512-S/3xB4KzyFxYGCppyDt68yzBU9ysL88lSdIah4D6cptdcltc4NCPCAMc0+PCpg/lLIyC7IPvj2Z52OJWeIUkog==" + }, + "node_modules/@types/node-fetch": { + "version": "2.5.12", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.12.tgz", + "integrity": "sha512-MKgC4dlq4kKNa/mYrwpKfzQMB5X3ee5U6fSprkKpToBqBmX4nFZL9cW5jl6sWn+xpRJ7ypWh2yyqqr8UUCstSw==", + "dev": true, + "dependencies": { + "@types/node": "*", + "form-data": "^3.0.0" + } + }, + "node_modules/@types/pbkdf2": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/pbkdf2/-/pbkdf2-3.1.0.tgz", + "integrity": "sha512-Cf63Rv7jCQ0LaL8tNXmEyqTHuIJxRdlS5vMh1mj5voN4+QFhVZnlZruezqpWYDiJ8UTzhP0VmeLXCmBk66YrMQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/prettier": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.4.3.tgz", + "integrity": "sha512-QzSuZMBuG5u8HqYz01qtMdg/Jfctlnvj1z/lYnIDXs/golxw0fxtRAHd9KrzjR7Yxz1qVeI00o0kiO3PmVdJ9w==", + "dev": true + }, + "node_modules/@types/resolve": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz", + "integrity": "sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/secp256k1": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@types/secp256k1/-/secp256k1-4.0.3.tgz", + "integrity": "sha512-Da66lEIFeIz9ltsdMZcpQvmrmmoqrfju8pm1BH8WbYjZSwUgCwXLb9C+9XYogwBITnbsSaMdVPb2ekf7TV+03w==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/sinon": { + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-10.0.6.tgz", + "integrity": "sha512-6EF+wzMWvBNeGrfP3Nx60hhx+FfwSg1JJBLAAP/IdIUq0EYkqCYf70VT3PhuhPX9eLD+Dp+lNdpb/ZeHG8Yezg==", + "dev": true, + "dependencies": { + "@sinonjs/fake-timers": "^7.1.0" + } + }, + "node_modules/@types/sinon-chai": { + "version": "3.2.8", + "resolved": "https://registry.npmjs.org/@types/sinon-chai/-/sinon-chai-3.2.8.tgz", + "integrity": "sha512-d4ImIQbT/rKMG8+AXpmcan5T2/PNeSjrYhvkwet6z0p8kzYtfgA32xzOBlbU0yqJfq+/0Ml805iFoODO0LP5/g==", + "dev": true, + "dependencies": { + "@types/chai": "*", + "@types/sinon": "*" + } + }, + "node_modules/@types/underscore": { + "version": "1.11.4", + "resolved": "https://registry.npmjs.org/@types/underscore/-/underscore-1.11.4.tgz", + "integrity": "sha512-uO4CD2ELOjw8tasUrAhvnn2W4A0ZECOvMjCivJr4gA9pGgjv+qxKWY9GLTMVEK8ej85BxQOocUyE7hImmSQYcg==", + "dev": true + }, + "node_modules/@types/web3": { + "version": "1.0.19", + "resolved": "https://registry.npmjs.org/@types/web3/-/web3-1.0.19.tgz", + "integrity": "sha512-fhZ9DyvDYDwHZUp5/STa9XW2re0E8GxoioYJ4pEUZ13YHpApSagixj7IAdoYH5uAK+UalGq6Ml8LYzmgRA/q+A==", + "dev": true, + "dependencies": { + "@types/bn.js": "*", + "@types/underscore": "*" + } + }, + "node_modules/@yarnpkg/lockfile": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz", + "integrity": 
"sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==", + "dev": true + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dev": true, + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/abstract-leveldown": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.3.0.tgz", + "integrity": "sha512-TU5nlYgta8YrBMNpc9FwQzRbiXsj49gsALsXadbGHt9CROPzX5fB0rWDR5mtdpOOKa5XqRFpbj1QroPAoPzVjQ==", + "dev": true, + "dependencies": { + "buffer": "^5.5.0", + "immediate": "^3.2.3", + "level-concat-iterator": "~2.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/acorn": { + "version": "6.4.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.2.tgz", + "integrity": "sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/adm-zip": { + "version": "0.4.16", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.16.tgz", + "integrity": "sha512-TFi4HBKSGfIKsK5YCkKaaFG2m4PEDyViZmEwof3MTIgzimHLto6muaHVpbrljdIvIrFZzEq/p4nafOeLcYegrg==", + "dev": true, + "engines": { + "node": ">=0.3.0" + } + }, + "node_modules/aes-js": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-3.1.2.tgz", + "integrity": "sha512-e5pEa2kBnBOgR4Y/p20pskXI74UEz7de8ZGVo58asOtvSVG5YAbJeELPZxOmt+Bnz3rX753YKhfIn4X4l1PPRQ==" + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": 
true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/antlr4": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/antlr4/-/antlr4-4.7.1.tgz", + "integrity": "sha512-haHyTW7Y9joE5MVs37P2lNYfU2RWBLfcRDD8OWldcdZm5TiCE91B5Xl1oWSwiDUSd4rlExpt2pu1fksYQjRBYQ==", + "dev": true + }, + "node_modules/antlr4ts": { + "version": "0.5.0-alpha.4", + "resolved": "https://registry.npmjs.org/antlr4ts/-/antlr4ts-0.5.0-alpha.4.tgz", + "integrity": "sha512-WPQDt1B74OfPv/IMS2ekXAKkTZIHl88uMetg6q3OTqgFxZ/dxDXI0EWLyZid/1Pe6hTftyg5N7gel5wNAGxXyQ==", + "dev": true + }, + "node_modules/anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/array-back": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", + "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", + "dev": true, + "dependencies": { + "typical": "^2.6.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/ast-parents": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/ast-parents/-/ast-parents-0.0.1.tgz", + "integrity": "sha1-UI/Q8F0MSHddnszaLhdEIyYejdM=", + "dev": true + }, + "node_modules/astral-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", + "integrity": 
"sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "dependencies": { + "lodash": "^4.17.14" + } + }, + "node_modules/async-eventemitter": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/async-eventemitter/-/async-eventemitter-0.2.4.tgz", + "integrity": "sha512-pd20BwL7Yt1zwDFy+8MX8F1+WCT8aQeKj0kQnTrH9WaeRETlRamVhD0JtRPmrV4GfOJ2F9CvdQkZeZhnh2TuHw==", + "dependencies": { + "async": "^2.4.0" + } + }, + "node_modules/async-limiter": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz", + "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + }, + "node_modules/await-semaphore": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/await-semaphore/-/await-semaphore-0.1.3.tgz", + "integrity": "sha512-d1W2aNSYcz/sxYO4pMGX9vq65qOTu0P800epMud+6cYYX0QcT7zyqcxec3VWzpgvdXo57UWmVbZpLMjX2m1I7Q==" + }, + "node_modules/aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "engines": { + "node": "*" + } + }, + "node_modules/aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" + }, + "node_modules/axios": { + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz", + "integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==", + "dev": true, + "dependencies": { + "follow-redirects": "^1.14.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs2": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz", + "integrity": "sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==", + "dependencies": { + "@babel/compat-data": "^7.13.11", + "@babel/helper-define-polyfill-provider": "^0.3.1", + "semver": "^6.1.1" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/babel-plugin-polyfill-corejs3": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.1.tgz", + "integrity": "sha512-TihqEe4sQcb/QcPJvxe94/9RZuLQuF1+To4WqQcRvc+3J3gLCPIPgDKzGLG6zmQLfH3nn25heRuDNkS2KR4I8A==", + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.3.1", + "core-js-compat": "^3.20.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/babel-plugin-polyfill-regenerator": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz", + "integrity": "sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==", + "dependencies": { + 
"@babel/helper-define-polyfill-provider": "^0.3.1" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/backoff": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/backoff/-/backoff-2.5.0.tgz", + "integrity": "sha1-9hbtqdPktmuMp/ynn2lXIsX44m8=", + "dependencies": { + "precond": "0.2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/base-x": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/base-x/-/base-x-3.0.9.tgz", + "integrity": "sha512-H7JU6iBHTal1gp56aKoaa//YUxEaAOUiydvrV/pILqIHXTtqxSkATOnDA2u+jZ/61sD+L/412+7kzXRtWukhpQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, + "node_modules/bcrypt-pbkdf/node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + }, + "node_modules/bech32": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/bech32/-/bech32-1.1.4.tgz", + "integrity": "sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ==", + "dev": true + }, + "node_modules/bignumber.js": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.2.tgz", + "integrity": "sha512-GAcQvbpsM0pUb0zw1EI0KhQEZ+lRwR5fYaAp3vPOYuP7aDvGy6cVN6XHLauvF8SOga2y0dcLcjt3iQDTSEliyw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/blakejs": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.1.1.tgz", + "integrity": "sha512-bLG6PHOCZJKNshTjGRBvET0vTciwQE6zFKOKKXPDJfwFBd4Ac0yBfPZqcGvGJap50l7ktvlpFqc2jGVaUgbJgg==" + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "node_modules/bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" + }, + "node_modules/brace-expansion": { + "version": 
"1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=" + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/browserify-aes": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", + "dependencies": { + "buffer-xor": "^1.0.3", + "cipher-base": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.3", + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/browserslist": { + "version": "4.19.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", + "integrity": "sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==", + "dependencies": { + "caniuse-lite": "^1.0.30001286", + "electron-to-chromium": "^1.4.17", + "escalade": "^3.1.1", + "node-releases": "^2.0.1", + "picocolors": "^1.0.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + } + }, + "node_modules/bs58": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/bs58/-/bs58-4.0.1.tgz", + "integrity": "sha1-vhYedsNU9veIrkBx9j806MTwpCo=", + "dependencies": { + "base-x": "^3.0.2" + } + }, + "node_modules/bs58check": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/bs58check/-/bs58check-2.1.2.tgz", + "integrity": "sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA==", + "dependencies": { + "bs58": "^4.0.0", + "create-hash": "^1.1.0", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/btoa": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz", + "integrity": "sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g==", + "bin": { + "btoa": "bin/btoa.js" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" 
+ } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, + "node_modules/buffer-xor": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", + "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=" + }, + "node_modules/bytes": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.1.tgz", + "integrity": "sha512-dWe4nWO/ruEOY7HkUJ5gFt1DCFV9zPRoJr8pV0/ASQermOZjtq8jMjOprC0Kd10GLN+l7xaUPvxzJFWtxGu8Fg==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/caller-callsite": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-callsite/-/caller-callsite-2.0.0.tgz", + "integrity": "sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ=", + "dev": true, + "dependencies": { + "callsites": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/caller-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-2.0.0.tgz", + "integrity": "sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=", + "dev": true, + "dependencies": { + "caller-callsite": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/callsites": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", + "integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001300", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001300.tgz", + "integrity": "sha512-cVjiJHWGcNlJi8TZVKNMnvMid3Z3TTdDHmLDzlOdIiZq138Exvo0G+G0wTdVYolxKb4AYwC+38pxodiInVtJSA==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + } + }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + }, + "node_modules/cbor": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/cbor/-/cbor-5.2.0.tgz", + "integrity": "sha512-5IMhi9e1QU76ppa5/ajP1BmMWZ2FHkhAhjeVKQ/EFCgYSEaeVaoGtL7cxJskf9oCCk+XjzaIdc3IuU/dbA/o2A==", + "dev": true, + "dependencies": { + "bignumber.js": "^9.0.1", + "nofilter": "^1.0.4" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/chai": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.4.tgz", + "integrity": "sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA==", + "dev": true, + "dependencies": { + 
"assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, + "node_modules/check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/checkpoint-store": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/checkpoint-store/-/checkpoint-store-1.1.0.tgz", + "integrity": "sha1-BOTLUWuRQziTWB5tRgGnjpVS6gY=", + "dependencies": { + "functional-red-black-tree": "^1.0.1" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", + "dev": true + }, + "node_modules/cipher-base": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/circular": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/circular/-/circular-1.0.5.tgz", + "integrity": "sha1-fad6+Yu96c5LWzWM1Va13e0tMUk=", + "dev": true + }, + "node_modules/cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "dependencies": { + "restore-cursor": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/cli-logger": { + "version": "0.5.40", + "resolved": "https://registry.npmjs.org/cli-logger/-/cli-logger-0.5.40.tgz", + "integrity": "sha1-CX8OEbByx8aYomxH9YiinCC0iws=", + "dev": true, + "dependencies": { + "circular": "^1.0.5", + "cli-util": "~1.1.27" + } + }, + "node_modules/cli-regexp": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/cli-regexp/-/cli-regexp-0.1.2.tgz", + "integrity": "sha1-a82TsJ+y7RAl0woRVdWZeVSlNRI=", + "dev": true + 
}, + "node_modules/cli-util": { + "version": "1.1.27", + "resolved": "https://registry.npmjs.org/cli-util/-/cli-util-1.1.27.tgz", + "integrity": "sha1-QtaeNqBAoyH8nPhRwVE8rcUJMFQ=", + "dev": true, + "dependencies": { + "cli-regexp": "~0.1.0" + } + }, + "node_modules/cli-width": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", + "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==", + "dev": true + }, + "node_modules/cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "dependencies": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + } + }, + "node_modules/cliui/node_modules/ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/cliui/node_modules/string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "dependencies": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/clone": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/command-exists": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz", + "integrity": 
"sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==", + "dev": true + }, + "node_modules/command-line-args": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-4.0.7.tgz", + "integrity": "sha512-aUdPvQRAyBvQd2n7jXcsMDz68ckBJELXNzBybCHOibUWEg0mWTnaYCSRU8h9R+aNRSvDihJtssSRCiDRpLaezA==", + "dev": true, + "dependencies": { + "array-back": "^2.0.0", + "find-replace": "^1.0.3", + "typical": "^2.6.1" + }, + "bin": { + "command-line-args": "bin/cli.js" + } + }, + "node_modules/commander": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/commander/-/commander-3.0.2.tgz", + "integrity": "sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "node_modules/convert-source-map": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", + "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", + "peer": true, + "dependencies": { + "safe-buffer": "~5.1.1" + } + }, + "node_modules/cookie": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.1.tgz", + "integrity": "sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/core-js-compat": { + "version": "3.20.3", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.20.3.tgz", + "integrity": "sha512-c8M5h0IkNZ+I92QhIpuSijOxGAcj3lgpsWdkCqmUTZNwidujF4r3pi6x1DCN+Vcs5qTS2XWWMfWSuCqyupX8gw==", + "dependencies": { + "browserslist": "^4.19.1", + "semver": "7.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/core-js-compat/node_modules/semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/core-js-pure": { + "version": "3.20.3", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.20.3.tgz", + "integrity": "sha512-Q2H6tQ5MtPtcC7f3HxJ48i4Q7T9ybPKgvWyuH7JXIoNa2pm0KuBnycsET/qw1SLLZYfbsbrZQNMeIOClb+6WIA==", + "dev": true, + "hasInstallScript": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "node_modules/cosmiconfig": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-5.2.1.tgz", + "integrity": "sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==", + "dev": true, + "dependencies": { + "import-fresh": "^2.0.0", + "is-directory": "^0.3.1", + "js-yaml": "^3.13.1", + "parse-json": "^4.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/cosmiconfig/node_modules/parse-json": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "dependencies": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/crc-32": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.0.tgz", + "integrity": "sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==", + "dev": true, + "dependencies": { + "exit-on-epipe": "~1.0.1", + "printj": "~1.1.0" + }, + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/create-hash": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", + "dependencies": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "node_modules/create-hmac": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "dependencies": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "node_modules/cross-fetch": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-2.2.5.tgz", + "integrity": "sha512-xqYAhQb4NhCJSRym03dwxpP1bYXpK3y7UN83Bo2WFi3x1Zmzn0SL/6xGoPr+gpt4WmNrgCCX3HPysvOwFOW36w==", + "dependencies": { + "node-fetch": "2.6.1", + "whatwg-fetch": "2.0.4" + } + }, + "node_modules/cross-fetch/node_modules/node-fetch": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==", + "engines": { + "node": "4.x || >=6.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + "node_modules/cross-spawn/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + 
"optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/deferred-leveldown": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz", + "integrity": "sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw==", + "dev": true, + "dependencies": { + "abstract-leveldown": "~6.2.1", + "inherits": "^2.0.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/deferred-leveldown/node_modules/abstract-leveldown": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", + "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", + "dev": true, + "dependencies": { + "buffer": "^5.5.0", + "immediate": "^3.2.3", + "level-concat-iterator": "~2.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "dependencies": { + "object-keys": "^1.0.12" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/delay": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", + "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + 
"esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dom-walk": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", + "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" + }, + "node_modules/ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.4.48", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.48.tgz", + "integrity": "sha512-RT3SEmpv7XUA+tKXrZGudAWLDpa7f8qmhjcLaM6OD/ERxjQ/zAojT8/Vvo0BSzbArkElFZ1WyZ9FuwAYbkdBNA==" + }, + "node_modules/elliptic": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "dependencies": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/encoding-down": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-6.3.0.tgz", + "integrity": "sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw==", + "dev": true, + "dependencies": { + "abstract-leveldown": "^6.2.1", + "inherits": "^2.0.3", + "level-codec": "^9.0.0", + "level-errors": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/encoding/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "dependencies": { + "ansi-colors": "^4.1.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/errno": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", + "integrity": 
"sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "dependencies": { + "prr": "~1.0.1" + }, + "bin": { + "errno": "cli.js" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-abstract": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", + "integrity": "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "get-intrinsic": "^1.1.1", + "get-symbol-description": "^1.0.0", + "has": "^1.0.3", + "has-symbols": "^1.0.2", + "internal-slot": "^1.0.3", + "is-callable": "^1.2.4", + "is-negative-zero": "^2.0.1", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.1", + "is-string": "^1.0.7", + "is-weakref": "^1.0.1", + "object-inspect": "^1.11.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.2", + "string.prototype.trimend": "^1.0.4", + "string.prototype.trimstart": "^1.0.4", + "unbox-primitive": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-abstract/node_modules/object.assign": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", + "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/eslint": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.9.1", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^4.0.3", + "eslint-utils": "^1.3.1", + "eslint-visitor-keys": 
"^1.0.0", + "espree": "^5.0.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob": "^7.1.2", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.11", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^6.14.0 || ^8.10.0 || >=9.10.0" + } + }, + "node_modules/eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint/node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/espree": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "dev": true, + "dependencies": { + "acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": 
"https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esquery/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eth-block-tracker": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/eth-block-tracker/-/eth-block-tracker-4.4.3.tgz", + "integrity": "sha512-A8tG4Z4iNg4mw5tP1Vung9N9IjgMNqpiMoJ/FouSFwNCGHv2X0mmOYwtQOJzki6XN7r7Tyo01S29p7b224I4jw==", + "dependencies": { + "@babel/plugin-transform-runtime": "^7.5.5", + "@babel/runtime": "^7.5.5", + "eth-query": "^2.1.0", + "json-rpc-random-id": "^1.0.1", + "pify": "^3.0.0", + "safe-event-emitter": "^1.0.1" + } + }, + "node_modules/eth-ens-namehash": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/eth-ens-namehash/-/eth-ens-namehash-2.0.8.tgz", + "integrity": "sha1-IprEbsqG1S4MmR58sq74P/D2i88=", + "dev": true, + "dependencies": { + "idna-uts46-hx": "^2.3.1", + "js-sha3": "^0.5.7" + } + }, + "node_modules/eth-ens-namehash/node_modules/js-sha3": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.5.7.tgz", + "integrity": "sha1-DU/9gALVMzqrr0oj7tL2N0yfKOc=", + "dev": true + }, + "node_modules/eth-json-rpc-errors": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eth-json-rpc-errors/-/eth-json-rpc-errors-2.0.2.tgz", + "integrity": "sha512-uBCRM2w2ewusRHGxN8JhcuOb2RN3ueAOYH/0BhqdFmQkZx5lj5+fLKTz0mIVOzd4FG5/kUksCzCD7eTEim6gaA==", + "deprecated": "Package 
renamed: https://www.npmjs.com/package/eth-rpc-errors", + "dependencies": { + "fast-safe-stringify": "^2.0.6" + } + }, + "node_modules/eth-query": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/eth-query/-/eth-query-2.1.2.tgz", + "integrity": "sha1-1nQdkAAQa1FRDHLbktY2VFam2l4=", + "dependencies": { + "json-rpc-random-id": "^1.0.0", + "xtend": "^4.0.1" + } + }, + "node_modules/eth-rpc-errors": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eth-rpc-errors/-/eth-rpc-errors-3.0.0.tgz", + "integrity": "sha512-iPPNHPrLwUlR9xCSYm7HHQjWBasor3+KZfRvwEWxMz3ca0yqnlBeJrnyphkGIXZ4J7AMAaOLmwy4AWhnxOiLxg==", + "dependencies": { + "fast-safe-stringify": "^2.0.6" + } + }, + "node_modules/eth-sig-util": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/eth-sig-util/-/eth-sig-util-2.5.4.tgz", + "integrity": "sha512-aCMBwp8q/4wrW4QLsF/HYBOSA7TpLKmkVwP3pYQNkEEseW2Rr8Z5Uxc9/h6HX+OG3tuHo+2bINVSihIeBfym6A==", + "deprecated": "Deprecated in favor of '@metamask/eth-sig-util'", + "dev": true, + "dependencies": { + "ethereumjs-abi": "0.6.8", + "ethereumjs-util": "^5.1.1", + "tweetnacl": "^1.0.3", + "tweetnacl-util": "^0.15.0" + } + }, + "node_modules/eth-sig-util/node_modules/ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dev": true, + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ethereum-bloom-filters": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/ethereum-bloom-filters/-/ethereum-bloom-filters-1.0.10.tgz", + "integrity": "sha512-rxJ5OFN3RwjQxDcFP2Z5+Q9ho4eIdEmSc2ht0fCu8Se9nbXjZ7/031uXoUYJ87KHCOdVeiUuwSnoS7hmYAGVHA==", + "dev": true, + "dependencies": { + "js-sha3": "^0.8.0" + } + }, + "node_modules/ethereum-common": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/ethereum-common/-/ethereum-common-0.2.0.tgz", + "integrity": "sha512-XOnAR/3rntJgbCdGhqdaLIxDLWKLmsZOGhHdBKadEr6gEnJLH52k93Ou+TUdFaPN3hJc3isBZBal3U/XZ15abA==" + }, + "node_modules/ethereum-cryptography": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/ethereum-cryptography/-/ethereum-cryptography-0.1.3.tgz", + "integrity": "sha512-w8/4x1SGGzc+tO97TASLja6SLd3fRIK2tLVcV2Gx4IB21hE19atll5Cq9o3d0ZmAYC/8aw0ipieTSiekAea4SQ==", + "dependencies": { + "@types/pbkdf2": "^3.0.0", + "@types/secp256k1": "^4.0.1", + "blakejs": "^1.1.0", + "browserify-aes": "^1.2.0", + "bs58check": "^2.1.2", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "hash.js": "^1.1.7", + "keccak": "^3.0.0", + "pbkdf2": "^3.0.17", + "randombytes": "^2.1.0", + "safe-buffer": "^5.1.2", + "scrypt-js": "^3.0.0", + "secp256k1": "^4.0.1", + "setimmediate": "^1.0.5" + } + }, + "node_modules/ethereum-protocol": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ethereum-protocol/-/ethereum-protocol-1.0.1.tgz", + "integrity": "sha512-3KLX1mHuEsBW0dKG+c6EOJS1NBNqdCICvZW9sInmZTt5aY0oxmHVggYRE0lJu1tcnMD1K+AKHdLi6U43Awm1Vg==" + }, + "node_modules/ethereum-waffle": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/ethereum-waffle/-/ethereum-waffle-3.4.0.tgz", + "integrity": "sha512-ADBqZCkoSA5Isk486ntKJVjFEawIiC+3HxNqpJqONvh3YXBTNiRfXvJtGuAFLXPG91QaqkGqILEHANAo7j/olQ==", + "dev": true, + 
"dependencies": { + "@ethereum-waffle/chai": "^3.4.0", + "@ethereum-waffle/compiler": "^3.4.0", + "@ethereum-waffle/mock-contract": "^3.3.0", + "@ethereum-waffle/provider": "^3.4.0", + "ethers": "^5.0.1" + }, + "bin": { + "waffle": "bin/waffle" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/ethereumjs-abi": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/ethereumjs-abi/-/ethereumjs-abi-0.6.8.tgz", + "integrity": "sha512-Tx0r/iXI6r+lRsdvkFDlut0N08jWMnKRZ6Gkq+Nmw75lZe4e6o3EkSnkaBP5NF6+m5PTGAr9JP43N3LyeoglsA==", + "dependencies": { + "bn.js": "^4.11.8", + "ethereumjs-util": "^6.0.0" + } + }, + "node_modules/ethereumjs-account": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/ethereumjs-account/-/ethereumjs-account-2.0.5.tgz", + "integrity": "sha512-bgDojnXGjhMwo6eXQC0bY6UK2liSFUSMwwylOmQvZbSl/D7NXQ3+vrGO46ZeOgjGfxXmgIeVNDIiHw7fNZM4VA==", + "dependencies": { + "ethereumjs-util": "^5.0.0", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ethereumjs-account/node_modules/ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ethereumjs-block": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/ethereumjs-block/-/ethereumjs-block-1.7.1.tgz", + "integrity": "sha512-B+sSdtqm78fmKkBq78/QLKJbu/4Ts4P2KFISdgcuZUPDm9x+N7qgBPIIFUGbaakQh8bzuquiRVbdmvPKqbILRg==", + "deprecated": "New package name format for new versions: @ethereumjs/block. Please update.", + "dependencies": { + "async": "^2.0.1", + "ethereum-common": "0.2.0", + "ethereumjs-tx": "^1.2.2", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ethereumjs-block/node_modules/ethereumjs-tx": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-1.3.7.tgz", + "integrity": "sha512-wvLMxzt1RPhAQ9Yi3/HKZTn0FZYpnsmQdbKYfUUpi4j1SEIcbkd9tndVjcPrufY3V7j2IebOpC00Zp2P/Ay2kA==", + "deprecated": "New package name format for new versions: @ethereumjs/tx. 
Please update.", + "dependencies": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "node_modules/ethereumjs-block/node_modules/ethereumjs-tx/node_modules/ethereum-common": { + "version": "0.0.18", + "resolved": "https://registry.npmjs.org/ethereum-common/-/ethereum-common-0.0.18.tgz", + "integrity": "sha1-L9w1dvIykDNYl26znaeDIT/5Uj8=" + }, + "node_modules/ethereumjs-block/node_modules/ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ethereumjs-common": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/ethereumjs-common/-/ethereumjs-common-1.5.2.tgz", + "integrity": "sha512-hTfZjwGX52GS2jcVO6E2sx4YuFnf0Fhp5ylo4pEPhEffNln7vS59Hr5sLnp3/QCazFLluuBZ+FZ6J5HTp0EqCA==", + "deprecated": "New package name format for new versions: @ethereumjs/common. Please update." + }, + "node_modules/ethereumjs-tx": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-2.1.2.tgz", + "integrity": "sha512-zZEK1onCeiORb0wyCXUvg94Ve5It/K6GD1K+26KfFKodiBiS6d9lfCXlUKGBBdQ+bv7Day+JK0tj1K+BeNFRAw==", + "deprecated": "New package name format for new versions: @ethereumjs/tx. Please update.", + "dependencies": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "node_modules/ethereumjs-util": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-6.2.1.tgz", + "integrity": "sha512-W2Ktez4L01Vexijrm5EB6w7dg4n/TgpoYU4avuT5T3Vmnw/eCRtiBrJfQYS/DCSvDIOLn2k57GcHdeBcgVxAqw==", + "dependencies": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + }, + "node_modules/ethereumjs-util/node_modules/@types/bn.js": { + "version": "4.11.6", + "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-4.11.6.tgz", + "integrity": "sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/ethereumjs-vm": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/ethereumjs-vm/-/ethereumjs-vm-2.6.0.tgz", + "integrity": "sha512-r/XIUik/ynGbxS3y+mvGnbOKnuLo40V5Mj1J25+HEO63aWYREIqvWeRO/hnROlMBE5WoniQmPmhiaN0ctiHaXw==", + "deprecated": "New package name format for new versions: @ethereumjs/vm. 
Please update.", + "dependencies": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "ethereumjs-account": "^2.0.3", + "ethereumjs-block": "~2.2.0", + "ethereumjs-common": "^1.1.0", + "ethereumjs-util": "^6.0.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ethereumjs-vm/node_modules/ethereumjs-block": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/ethereumjs-block/-/ethereumjs-block-2.2.2.tgz", + "integrity": "sha512-2p49ifhek3h2zeg/+da6XpdFR3GlqY3BIEiqxGF8j9aSRIgkb7M1Ky+yULBKJOu8PAZxfhsYA+HxUk2aCQp3vg==", + "deprecated": "New package name format for new versions: @ethereumjs/block. Please update.", + "dependencies": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ethereumjs-vm/node_modules/ethereumjs-block/node_modules/ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ethereumjs-wallet": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/ethereumjs-wallet/-/ethereumjs-wallet-1.0.2.tgz", + "integrity": "sha512-CCWV4RESJgRdHIvFciVQFnCHfqyhXWchTPlkfp28Qc53ufs+doi5I/cV2+xeK9+qEo25XCWfP9MiL+WEPAZfdA==", + "dependencies": { + "aes-js": "^3.1.2", + "bs58check": "^2.1.2", + "ethereum-cryptography": "^0.1.3", + "ethereumjs-util": "^7.1.2", + "randombytes": "^2.1.0", + "scrypt-js": "^3.0.1", + "utf8": "^3.0.0", + "uuid": "^8.3.2" + } + }, + "node_modules/ethereumjs-wallet/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==" + }, + "node_modules/ethereumjs-wallet/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/ethers": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-5.5.3.tgz", + "integrity": "sha512-fTT4WT8/hTe/BLwRUtl7I5zlpF3XC3P/Xwqxc5AIP2HGlH15qpmjs0Ou78az93b1rLITzXLFxoNX63B8ZbUd7g==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abi": "5.5.0", + "@ethersproject/abstract-provider": "5.5.1", + "@ethersproject/abstract-signer": "5.5.0", + "@ethersproject/address": "5.5.0", + "@ethersproject/base64": "5.5.0", + "@ethersproject/basex": "5.5.0", + "@ethersproject/bignumber": "5.5.0", + "@ethersproject/bytes": "5.5.0", + 
"@ethersproject/constants": "5.5.0", + "@ethersproject/contracts": "5.5.0", + "@ethersproject/hash": "5.5.0", + "@ethersproject/hdnode": "5.5.0", + "@ethersproject/json-wallets": "5.5.0", + "@ethersproject/keccak256": "5.5.0", + "@ethersproject/logger": "5.5.0", + "@ethersproject/networks": "5.5.2", + "@ethersproject/pbkdf2": "5.5.0", + "@ethersproject/properties": "5.5.0", + "@ethersproject/providers": "5.5.2", + "@ethersproject/random": "5.5.1", + "@ethersproject/rlp": "5.5.0", + "@ethersproject/sha2": "5.5.0", + "@ethersproject/signing-key": "5.5.0", + "@ethersproject/solidity": "5.5.0", + "@ethersproject/strings": "5.5.0", + "@ethersproject/transactions": "5.5.0", + "@ethersproject/units": "5.5.0", + "@ethersproject/wallet": "5.5.0", + "@ethersproject/web": "5.5.1", + "@ethersproject/wordlists": "5.5.0" + } + }, + "node_modules/ethjs-unit": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/ethjs-unit/-/ethjs-unit-0.1.6.tgz", + "integrity": "sha1-xmWSHkduh7ziqdWIpv4EBbLEFpk=", + "dev": true, + "dependencies": { + "bn.js": "4.11.6", + "number-to-bn": "1.7.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/ethjs-unit/node_modules/bn.js": { + "version": "4.11.6", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.6.tgz", + "integrity": "sha1-UzRK2xRhehP26N0s4okF0cC6MhU=", + "dev": true + }, + "node_modules/ethjs-util": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/ethjs-util/-/ethjs-util-0.1.6.tgz", + "integrity": "sha512-CUnVOQq7gSpDHZVVrQW8ExxUETWrnrvXYvYz55wOU8Uj4VCgw56XC2B/fVqQN+f7gmrnRHSLVnFAwsCuNwji8w==", + "dependencies": { + "is-hex-prefixed": "1.0.0", + "strip-hex-prefix": "1.0.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/evp_bytestokey": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", + "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", + "dependencies": { + "md5.js": "^1.3.4", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/exit-on-epipe": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz", + "integrity": "sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "node_modules/external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "dependencies": { + "chardet": 
"^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "engines": [ + "node >=0.6.0" + ] + }, + "node_modules/fake-merkle-patricia-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/fake-merkle-patricia-tree/-/fake-merkle-patricia-tree-1.0.1.tgz", + "integrity": "sha1-S4w6z7Ugr635hgsfFM2M40As3dM=", + "dependencies": { + "checkpoint-store": "^1.1.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "node_modules/fetch-ponyfill": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/fetch-ponyfill/-/fetch-ponyfill-4.1.0.tgz", + "integrity": "sha1-rjzl9zLGReq4fkroeTQUcJsjmJM=", + "dependencies": { + "node-fetch": "~1.7.1" + } + }, + "node_modules/fetch-ponyfill/node_modules/node-fetch": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz", + "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==", + "dependencies": { + "encoding": "^0.1.11", + "is-stream": "^1.0.1" + } + }, + "node_modules/figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dev": true, + "dependencies": { + "flat-cache": "^2.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/find-replace": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-1.0.3.tgz", + "integrity": "sha1-uI5zZNLZyVlVnziMZmcNYTBEH6A=", + "dev": true, + "dependencies": { + "array-back": "^1.0.4", + "test-value": "^2.1.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/find-replace/node_modules/array-back": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", + "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", + "dev": true, + "dependencies": { + "typical": "^2.6.0" + }, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "dependencies": { + "locate-path": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/find-yarn-workspace-root": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz", + "integrity": "sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ==", + "dev": true, + "dependencies": { + "micromatch": "^4.0.2" + } + }, + "node_modules/flat": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.1.tgz", + "integrity": "sha512-FmTtBsHskrU6FJ2VxCnsDb84wu9zhmO3cUX2kGFb5tuwhfXxGciiT0oRY+cck35QmG+NmGh5eLz6lLCpWTqwpA==", + "dev": true, + "dependencies": { + "is-buffer": "~2.0.3" + }, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dev": true, + "dependencies": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/flat-cache/node_modules/rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/flatted": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", + "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", + "dev": true + }, + "node_modules/follow-redirects": { + "version": "1.14.7", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.7.tgz", + "integrity": "sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "engines": { + "node": "*" + } + }, + "node_modules/form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": 
"sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fp-ts": { + "version": "1.19.3", + "resolved": "https://registry.npmjs.org/fp-ts/-/fp-ts-1.19.3.tgz", + "integrity": "sha512-H5KQDspykdHuztLTg+ajGN0Z2qUjcEf3Ybxc6hLt0k7/zPkn29XnKnxlBPyW2XIddWrGaJBzBl4VLYOtk39yZg==", + "dev": true + }, + "node_modules/fs-extra": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "engines": { + "node": ">=6 <7 || >=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=" + }, + "node_modules/ganache-core": { + "version": "2.13.2", + "resolved": "https://registry.npmjs.org/ganache-core/-/ganache-core-2.13.2.tgz", + "integrity": "sha512-tIF5cR+ANQz0+3pHWxHjIwHqFXcVo0Mb+kcsNhglNFALcYo49aQpnS9dqHartqPfMFjiHh/qFoD3mYK0d/qGgw==", + "bundleDependencies": [ + "keccak" + ], + "dev": true, + "hasShrinkwrap": true, + "dependencies": { + "abstract-leveldown": "3.0.0", + "async": "2.6.2", + "bip39": "2.5.0", + "cachedown": "1.0.0", + "clone": "2.1.2", + "debug": "3.2.6", + "encoding-down": "5.0.4", + "eth-sig-util": "3.0.0", + "ethereumjs-abi": "0.6.8", + "ethereumjs-account": "3.0.0", + "ethereumjs-block": "2.2.2", + "ethereumjs-common": "1.5.0", + "ethereumjs-tx": "2.1.2", + "ethereumjs-util": "6.2.1", + "ethereumjs-vm": "4.2.0", + "heap": "0.2.6", + "keccak": "3.0.1", + "level-sublevel": "6.6.4", + "levelup": "3.1.1", + "lodash": "4.17.20", + "lru-cache": "5.1.1", + "merkle-patricia-tree": "3.0.0", + "patch-package": "6.2.2", + "seedrandom": "3.0.1", + "source-map-support": "0.5.12", + "tmp": "0.1.0", + "web3-provider-engine": "14.2.1", + "websocket": "1.0.32" + }, + "engines": { + "node": ">=8.9.0" + }, + "optionalDependencies": { + "ethereumjs-wallet": "0.6.5", + "web3": "1.2.11" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/abi": { + "version": "5.0.0-beta.153", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/address": ">=5.0.0-beta.128", + "@ethersproject/bignumber": ">=5.0.0-beta.130", + "@ethersproject/bytes": ">=5.0.0-beta.129", + 
"@ethersproject/constants": ">=5.0.0-beta.128", + "@ethersproject/hash": ">=5.0.0-beta.128", + "@ethersproject/keccak256": ">=5.0.0-beta.127", + "@ethersproject/logger": ">=5.0.0-beta.129", + "@ethersproject/properties": ">=5.0.0-beta.131", + "@ethersproject/strings": ">=5.0.0-beta.130" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/abstract-provider": { + "version": "5.0.8", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/networks": "^5.0.7", + "@ethersproject/properties": "^5.0.7", + "@ethersproject/transactions": "^5.0.9", + "@ethersproject/web": "^5.0.12" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/abstract-signer": { + "version": "5.0.10", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/abstract-provider": "^5.0.8", + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/address": { + "version": "5.0.9", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/keccak256": "^5.0.7", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/rlp": "^5.0.7" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/base64": { + "version": "5.0.7", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bytes": "^5.0.9" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/bignumber": { + "version": "5.0.13", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "bn.js": "^4.4.0" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/bytes": { + "version": "5.0.9", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/logger": "^5.0.8" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/constants": { + "version": "5.0.8", + "dev": true, + 
"funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bignumber": "^5.0.13" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/hash": { + "version": "5.0.10", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/abstract-signer": "^5.0.10", + "@ethersproject/address": "^5.0.9", + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/keccak256": "^5.0.7", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7", + "@ethersproject/strings": "^5.0.8" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/keccak256": { + "version": "5.0.7", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bytes": "^5.0.9", + "js-sha3": "0.5.7" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/logger": { + "version": "5.0.8", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/@ethersproject/networks": { + "version": "5.0.7", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/logger": "^5.0.8" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/properties": { + "version": "5.0.7", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/logger": "^5.0.8" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/rlp": { + "version": "5.0.7", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/signing-key": { + "version": "5.0.8", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": 
"^5.0.8", + "@ethersproject/properties": "^5.0.7", + "elliptic": "6.5.3" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/strings": { + "version": "5.0.8", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/constants": "^5.0.8", + "@ethersproject/logger": "^5.0.8" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/transactions": { + "version": "5.0.9", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/address": "^5.0.9", + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/constants": "^5.0.8", + "@ethersproject/keccak256": "^5.0.7", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7", + "@ethersproject/rlp": "^5.0.7", + "@ethersproject/signing-key": "^5.0.8" + } + }, + "node_modules/ganache-core/node_modules/@ethersproject/web": { + "version": "5.0.12", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@ethersproject/base64": "^5.0.7", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7", + "@ethersproject/strings": "^5.0.8" + } + }, + "node_modules/ganache-core/node_modules/@sindresorhus/is": { + "version": "0.14.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/@szmarczak/http-timer": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "defer-to-connect": "^1.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/@types/bn.js": { + "version": "4.11.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/ganache-core/node_modules/@types/node": { + "version": "14.14.20", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/@types/pbkdf2": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/ganache-core/node_modules/@types/secp256k1": { + "version": "4.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/ganache-core/node_modules/@yarnpkg/lockfile": { + "version": "1.1.0", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/ganache-core/node_modules/abstract-leveldown": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/accepts": { + "version": "1.3.7", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "mime-types": "~2.1.24", + "negotiator": "0.6.2" + }, + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/ganache-core/node_modules/aes-js": { + "version": "3.1.2", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/ajv": { + "version": "6.12.6", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ganache-core/node_modules/ansi-styles": { + "version": "3.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/arr-diff": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/arr-flatten": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/arr-union": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/array-flatten": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/array-unique": { + "version": "0.3.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/asn1": { + "version": "0.2.4", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/ganache-core/node_modules/asn1.js": { + "version": "5.4.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "^4.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "safer-buffer": "^2.1.0" + } + }, + "node_modules/ganache-core/node_modules/assert-plus": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/ganache-core/node_modules/assign-symbols": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/async": { + "version": "2.6.2", + "dev": true, + "license": "MIT", + "dependencies": { + "lodash": "^4.17.11" + } + }, + "node_modules/ganache-core/node_modules/async-eventemitter": { + "version": "0.2.4", + "dev": true, + "license": "MIT", + "dependencies": { + "async": "^2.4.0" + } + }, + "node_modules/ganache-core/node_modules/async-limiter": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/asynckit": { + "version": "0.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/atob": { + "version": "2.1.2", + "dev": true, + "license": "(MIT OR Apache-2.0)", + "bin": { + "atob": "bin/atob.js" + }, + "engines": { + "node": ">= 4.5.0" + } + }, + "node_modules/ganache-core/node_modules/aws-sign2": { + "version": "0.7.0", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/aws4": { + "version": "1.11.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/babel-code-frame": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^1.1.3", + "esutils": "^2.0.2", + "js-tokens": "^3.0.2" + } + }, + 
"node_modules/ganache-core/node_modules/babel-code-frame/node_modules/ansi-regex": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/babel-code-frame/node_modules/ansi-styles": { + "version": "2.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/babel-code-frame/node_modules/chalk": { + "version": "1.1.3", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/babel-code-frame/node_modules/js-tokens": { + "version": "3.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/babel-code-frame/node_modules/strip-ansi": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/babel-code-frame/node_modules/supports-color": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/ganache-core/node_modules/babel-core": { + "version": "6.26.3", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-code-frame": "^6.26.0", + "babel-generator": "^6.26.0", + "babel-helpers": "^6.24.1", + "babel-messages": "^6.23.0", + "babel-register": "^6.26.0", + "babel-runtime": "^6.26.0", + "babel-template": "^6.26.0", + "babel-traverse": "^6.26.0", + "babel-types": "^6.26.0", + "babylon": "^6.18.0", + "convert-source-map": "^1.5.1", + "debug": "^2.6.9", + "json5": "^0.5.1", + "lodash": "^4.17.4", + "minimatch": "^3.0.4", + "path-is-absolute": "^1.0.1", + "private": "^0.1.8", + "slash": "^1.0.0", + "source-map": "^0.5.7" + } + }, + "node_modules/ganache-core/node_modules/babel-core/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/babel-core/node_modules/json5": { + "version": "0.5.1", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/ganache-core/node_modules/babel-core/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/babel-core/node_modules/slash": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/babel-generator": { + "version": "6.26.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-messages": "^6.23.0", + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "detect-indent": "^4.0.0", + "jsesc": "^1.3.0", + "lodash": "^4.17.4", + "source-map": "^0.5.7", + "trim-right": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/babel-generator/node_modules/jsesc": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-builder-binary-assignment-operator-visitor": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-explode-assignable-expression": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + 
"node_modules/ganache-core/node_modules/babel-helper-call-delegate": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-hoist-variables": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-define-map": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-function-name": "^6.24.1", + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "lodash": "^4.17.4" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-explode-assignable-expression": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-function-name": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-get-function-arity": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-get-function-arity": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-hoist-variables": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-optimise-call-expression": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-regex": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "lodash": "^4.17.4" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-remap-async-to-generator": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-function-name": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-helper-replace-supers": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-optimise-call-expression": "^6.24.1", + "babel-messages": "^6.23.0", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-helpers": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-messages": { + "version": "6.23.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-check-es2015-constants": { + "version": "6.22.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-syntax-async-functions": { + "version": "6.13.0", + "dev": true, + "license": "MIT" + }, + 
"node_modules/ganache-core/node_modules/babel-plugin-syntax-exponentiation-operator": { + "version": "6.13.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/babel-plugin-syntax-trailing-function-commas": { + "version": "6.22.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-async-to-generator": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-remap-async-to-generator": "^6.24.1", + "babel-plugin-syntax-async-functions": "^6.8.0", + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-arrow-functions": { + "version": "6.22.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-block-scoped-functions": { + "version": "6.22.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-block-scoping": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.26.0", + "babel-template": "^6.26.0", + "babel-traverse": "^6.26.0", + "babel-types": "^6.26.0", + "lodash": "^4.17.4" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-classes": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-define-map": "^6.24.1", + "babel-helper-function-name": "^6.24.1", + "babel-helper-optimise-call-expression": "^6.24.1", + "babel-helper-replace-supers": "^6.24.1", + "babel-messages": "^6.23.0", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-computed-properties": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-destructuring": { + "version": "6.23.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-duplicate-keys": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-for-of": { + "version": "6.23.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-function-name": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-function-name": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-literals": { + "version": "6.22.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-modules-amd": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + 
"node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-modules-commonjs": { + "version": "6.26.2", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-transform-strict-mode": "^6.24.1", + "babel-runtime": "^6.26.0", + "babel-template": "^6.26.0", + "babel-types": "^6.26.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-modules-systemjs": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-hoist-variables": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-modules-umd": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-transform-es2015-modules-amd": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-object-super": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-replace-supers": "^6.24.1", + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-parameters": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-call-delegate": "^6.24.1", + "babel-helper-get-function-arity": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-shorthand-properties": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-spread": { + "version": "6.22.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-sticky-regex": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-regex": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-template-literals": { + "version": "6.22.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-typeof-symbol": { + "version": "6.23.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-es2015-unicode-regex": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-regex": "^6.24.1", + "babel-runtime": "^6.22.0", + "regexpu-core": "^2.0.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-exponentiation-operator": { + "version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-helper-builder-binary-assignment-operator-visitor": "^6.24.1", + "babel-plugin-syntax-exponentiation-operator": "^6.8.0", + "babel-runtime": "^6.22.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-regenerator": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "regenerator-transform": "^0.10.0" + } + }, + "node_modules/ganache-core/node_modules/babel-plugin-transform-strict-mode": { + 
"version": "6.24.1", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/ganache-core/node_modules/babel-preset-env": { + "version": "1.7.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-check-es2015-constants": "^6.22.0", + "babel-plugin-syntax-trailing-function-commas": "^6.22.0", + "babel-plugin-transform-async-to-generator": "^6.22.0", + "babel-plugin-transform-es2015-arrow-functions": "^6.22.0", + "babel-plugin-transform-es2015-block-scoped-functions": "^6.22.0", + "babel-plugin-transform-es2015-block-scoping": "^6.23.0", + "babel-plugin-transform-es2015-classes": "^6.23.0", + "babel-plugin-transform-es2015-computed-properties": "^6.22.0", + "babel-plugin-transform-es2015-destructuring": "^6.23.0", + "babel-plugin-transform-es2015-duplicate-keys": "^6.22.0", + "babel-plugin-transform-es2015-for-of": "^6.23.0", + "babel-plugin-transform-es2015-function-name": "^6.22.0", + "babel-plugin-transform-es2015-literals": "^6.22.0", + "babel-plugin-transform-es2015-modules-amd": "^6.22.0", + "babel-plugin-transform-es2015-modules-commonjs": "^6.23.0", + "babel-plugin-transform-es2015-modules-systemjs": "^6.23.0", + "babel-plugin-transform-es2015-modules-umd": "^6.23.0", + "babel-plugin-transform-es2015-object-super": "^6.22.0", + "babel-plugin-transform-es2015-parameters": "^6.23.0", + "babel-plugin-transform-es2015-shorthand-properties": "^6.22.0", + "babel-plugin-transform-es2015-spread": "^6.22.0", + "babel-plugin-transform-es2015-sticky-regex": "^6.22.0", + "babel-plugin-transform-es2015-template-literals": "^6.22.0", + "babel-plugin-transform-es2015-typeof-symbol": "^6.23.0", + "babel-plugin-transform-es2015-unicode-regex": "^6.22.0", + "babel-plugin-transform-exponentiation-operator": "^6.22.0", + "babel-plugin-transform-regenerator": "^6.22.0", + "browserslist": "^3.2.6", + "invariant": "^2.2.2", + "semver": "^5.3.0" + } + }, + "node_modules/ganache-core/node_modules/babel-preset-env/node_modules/semver": { + "version": "5.7.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/ganache-core/node_modules/babel-register": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-core": "^6.26.0", + "babel-runtime": "^6.26.0", + "core-js": "^2.5.0", + "home-or-tmp": "^2.0.0", + "lodash": "^4.17.4", + "mkdirp": "^0.5.1", + "source-map-support": "^0.4.15" + } + }, + "node_modules/ganache-core/node_modules/babel-register/node_modules/source-map-support": { + "version": "0.4.18", + "dev": true, + "license": "MIT", + "dependencies": { + "source-map": "^0.5.6" + } + }, + "node_modules/ganache-core/node_modules/babel-runtime": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "core-js": "^2.4.0", + "regenerator-runtime": "^0.11.0" + } + }, + "node_modules/ganache-core/node_modules/babel-template": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.26.0", + "babel-traverse": "^6.26.0", + "babel-types": "^6.26.0", + "babylon": "^6.18.0", + "lodash": "^4.17.4" + } + }, + "node_modules/ganache-core/node_modules/babel-traverse": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-code-frame": "^6.26.0", + "babel-messages": "^6.23.0", + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "babylon": "^6.18.0", + "debug": "^2.6.8", + "globals": "^9.18.0", + "invariant": "^2.2.2", + "lodash": 
"^4.17.4" + } + }, + "node_modules/ganache-core/node_modules/babel-traverse/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/babel-traverse/node_modules/globals": { + "version": "9.18.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/babel-traverse/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/babel-types": { + "version": "6.26.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-runtime": "^6.26.0", + "esutils": "^2.0.2", + "lodash": "^4.17.4", + "to-fast-properties": "^1.0.3" + } + }, + "node_modules/ganache-core/node_modules/babel-types/node_modules/to-fast-properties": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/babelify": { + "version": "7.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-core": "^6.0.14", + "object-assign": "^4.0.0" + } + }, + "node_modules/ganache-core/node_modules/babylon": { + "version": "6.18.0", + "dev": true, + "license": "MIT", + "bin": { + "babylon": "bin/babylon.js" + } + }, + "node_modules/ganache-core/node_modules/backoff": { + "version": "2.5.0", + "dev": true, + "license": "MIT", + "dependencies": { + "precond": "0.2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/balanced-match": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/base": { + "version": "0.11.2", + "dev": true, + "license": "MIT", + "dependencies": { + "cache-base": "^1.0.1", + "class-utils": "^0.3.5", + "component-emitter": "^1.2.1", + "define-property": "^1.0.0", + "isobject": "^3.0.1", + "mixin-deep": "^1.2.0", + "pascalcase": "^0.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/base-x": { + "version": "3.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ganache-core/node_modules/base/node_modules/define-property": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/base64-js": { + "version": "1.5.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, + "node_modules/ganache-core/node_modules/bcrypt-pbkdf/node_modules/tweetnacl": { + "version": "0.14.5", + "dev": true, + "license": "Unlicense" + }, + "node_modules/ganache-core/node_modules/bignumber.js": { + "version": "9.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/bip39": { + "version": "2.5.0", + "dev": true, + "license": "ISC", + "dependencies": { + "create-hash": "^1.1.0", + "pbkdf2": "^3.0.9", + "randombytes": "^2.0.1", + "safe-buffer": "^5.0.1", + "unorm": "^1.3.3" + } + }, + 
"node_modules/ganache-core/node_modules/blakejs": { + "version": "1.1.0", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/ganache-core/node_modules/bluebird": { + "version": "3.7.2", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/bn.js": { + "version": "4.11.9", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/body-parser": { + "version": "1.19.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bytes": "3.1.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "on-finished": "~2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/body-parser/node_modules/qs": { + "version": "6.7.0", + "dev": true, + "license": "BSD-3-Clause", + "optional": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/ganache-core/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/ganache-core/node_modules/brorand": { + "version": "1.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/browserify-aes": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-xor": "^1.0.3", + "cipher-base": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.3", + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ganache-core/node_modules/browserify-cipher": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "browserify-aes": "^1.0.4", + "browserify-des": "^1.0.0", + "evp_bytestokey": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/browserify-des": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "cipher-base": "^1.0.1", + "des.js": "^1.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/ganache-core/node_modules/browserify-rsa": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "^5.0.0", + "randombytes": "^2.0.1" + } + }, + "node_modules/ganache-core/node_modules/browserify-rsa/node_modules/bn.js": { + "version": "5.1.3", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/browserify-sign": { + "version": "4.2.1", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "bn.js": "^5.1.1", + "browserify-rsa": "^4.0.1", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "elliptic": "^6.5.3", + "inherits": "^2.0.4", + "parse-asn1": "^5.1.5", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + } + }, + "node_modules/ganache-core/node_modules/browserify-sign/node_modules/bn.js": { + "version": "5.1.3", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/browserify-sign/node_modules/readable-stream": { + "version": 
"3.6.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ganache-core/node_modules/browserslist": { + "version": "3.2.8", + "dev": true, + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30000844", + "electron-to-chromium": "^1.3.47" + }, + "bin": { + "browserslist": "cli.js" + } + }, + "node_modules/ganache-core/node_modules/bs58": { + "version": "4.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "base-x": "^3.0.2" + } + }, + "node_modules/ganache-core/node_modules/bs58check": { + "version": "2.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "bs58": "^4.0.0", + "create-hash": "^1.1.0", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/ganache-core/node_modules/buffer": { + "version": "5.7.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/ganache-core/node_modules/buffer-from": { + "version": "1.1.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/buffer-to-arraybuffer": { + "version": "0.0.5", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/buffer-xor": { + "version": "1.0.3", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/bufferutil": { + "version": "4.0.3", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "node-gyp-build": "^4.2.0" + } + }, + "node_modules/ganache-core/node_modules/bytes": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/bytewise": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "bytewise-core": "^1.2.2", + "typewise": "^1.0.3" + } + }, + "node_modules/ganache-core/node_modules/bytewise-core": { + "version": "1.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "typewise-core": "^1.2" + } + }, + "node_modules/ganache-core/node_modules/cache-base": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "collection-visit": "^1.0.0", + "component-emitter": "^1.2.1", + "get-value": "^2.0.6", + "has-value": "^1.0.0", + "isobject": "^3.0.1", + "set-value": "^2.0.0", + "to-object-path": "^0.3.0", + "union-value": "^1.0.0", + "unset-value": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/cacheable-request": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^4.1.0", + "responselike": "^1.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ganache-core/node_modules/cacheable-request/node_modules/lowercase-keys": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ganache-core/node_modules/cachedown": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + 
"dependencies": { + "abstract-leveldown": "^2.4.1", + "lru-cache": "^3.2.0" + } + }, + "node_modules/ganache-core/node_modules/cachedown/node_modules/abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/cachedown/node_modules/lru-cache": { + "version": "3.2.0", + "dev": true, + "license": "ISC", + "dependencies": { + "pseudomap": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/call-bind": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/caniuse-lite": { + "version": "1.0.30001174", + "dev": true, + "license": "CC-BY-4.0" + }, + "node_modules/ganache-core/node_modules/caseless": { + "version": "0.12.0", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/ganache-core/node_modules/chalk": { + "version": "2.4.2", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/checkpoint-store": { + "version": "1.1.0", + "dev": true, + "license": "ISC", + "dependencies": { + "functional-red-black-tree": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/chownr": { + "version": "1.1.4", + "dev": true, + "license": "ISC", + "optional": true + }, + "node_modules/ganache-core/node_modules/ci-info": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/cids": { + "version": "0.7.5", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "buffer": "^5.5.0", + "class-is": "^1.1.0", + "multibase": "~0.6.0", + "multicodec": "^1.0.0", + "multihashes": "~0.4.15" + }, + "engines": { + "node": ">=4.0.0", + "npm": ">=3.0.0" + } + }, + "node_modules/ganache-core/node_modules/cids/node_modules/multicodec": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "buffer": "^5.6.0", + "varint": "^5.0.0" + } + }, + "node_modules/ganache-core/node_modules/cipher-base": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ganache-core/node_modules/class-is": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/class-utils": { + "version": "0.3.6", + "dev": true, + "license": "MIT", + "dependencies": { + "arr-union": "^3.1.0", + "define-property": "^0.2.5", + "isobject": "^3.0.0", + "static-extend": "^0.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/class-utils/node_modules/define-property": { + "version": "0.2.5", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/class-utils/node_modules/is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/class-utils/node_modules/is-accessor-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + 
"dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/class-utils/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/class-utils/node_modules/is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/class-utils/node_modules/is-data-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/class-utils/node_modules/is-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/class-utils/node_modules/kind-of": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/clone": { + "version": "2.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/ganache-core/node_modules/clone-response": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "mimic-response": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/collection-visit": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "map-visit": "^1.0.0", + "object-visit": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/color-convert": { + "version": "1.9.3", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/ganache-core/node_modules/color-name": { + "version": "1.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/combined-stream": { + "version": "1.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/component-emitter": { + "version": "1.3.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/concat-map": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/concat-stream": { + "version": "1.6.2", + "dev": true, + "engines": [ + "node >= 0.8" + ], + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/ganache-core/node_modules/content-disposition": { + "version": "0.5.3", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/content-disposition/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/content-hash": { + "version": "2.5.2", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "cids": "^0.7.1", + "multicodec": "^0.5.5", + "multihashes": "^0.4.15" + } + }, + "node_modules/ganache-core/node_modules/content-type": { + 
"version": "1.0.4", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/convert-source-map": { + "version": "1.7.0", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.1" + } + }, + "node_modules/ganache-core/node_modules/convert-source-map/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/cookie": { + "version": "0.4.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/cookie-signature": { + "version": "1.0.6", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/cookiejar": { + "version": "2.1.2", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/copy-descriptor": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/core-js": { + "version": "2.6.12", + "dev": true, + "hasInstallScript": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/core-js-pure": { + "version": "3.8.2", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/ganache-core/node_modules/core-util-is": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/cors": { + "version": "2.8.5", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/ganache-core/node_modules/create-ecdh": { + "version": "4.0.4", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "^4.1.0", + "elliptic": "^6.5.3" + } + }, + "node_modules/ganache-core/node_modules/create-hash": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "node_modules/ganache-core/node_modules/create-hmac": { + "version": "1.1.7", + "dev": true, + "license": "MIT", + "dependencies": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "node_modules/ganache-core/node_modules/cross-fetch": { + "version": "2.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "node-fetch": "2.1.2", + "whatwg-fetch": "2.0.4" + } + }, + "node_modules/ganache-core/node_modules/crypto-browserify": { + "version": "3.12.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "browserify-cipher": "^1.0.0", + "browserify-sign": "^4.0.0", + "create-ecdh": "^4.0.0", + "create-hash": "^1.1.0", + "create-hmac": "^1.1.0", + "diffie-hellman": "^5.0.0", + "inherits": "^2.0.1", + "pbkdf2": "^3.0.3", + "public-encrypt": "^4.0.0", + "randombytes": "^2.0.0", + "randomfill": "^1.0.3" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/d": { + "version": "1.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "es5-ext": "^0.10.50", + "type": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/dashdash": { + "version": "1.14.1", + "dev": true, + 
"license": "MIT", + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/ganache-core/node_modules/debug": { + "version": "3.2.6", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/ganache-core/node_modules/decode-uri-component": { + "version": "0.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/ganache-core/node_modules/decompress-response": { + "version": "3.3.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "mimic-response": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/deep-equal": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arguments": "^1.0.4", + "is-date-object": "^1.0.1", + "is-regex": "^1.0.4", + "object-is": "^1.0.1", + "object-keys": "^1.1.1", + "regexp.prototype.flags": "^1.2.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/defer-to-connect": { + "version": "1.1.3", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/deferred-leveldown": { + "version": "4.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~5.0.0", + "inherits": "^2.0.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/deferred-leveldown/node_modules/abstract-leveldown": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/define-properties": { + "version": "1.1.3", + "dev": true, + "license": "MIT", + "dependencies": { + "object-keys": "^1.0.12" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ganache-core/node_modules/define-property": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/defined": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/delayed-stream": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ganache-core/node_modules/depd": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/des.js": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/destroy": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/detect-indent": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "repeating": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/diffie-hellman": { + "version": "5.0.3", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "^4.1.0", + "miller-rabin": "^4.0.0", + "randombytes": "^2.0.0" + } + }, + "node_modules/ganache-core/node_modules/dom-walk": { + "version": "0.1.2", + "dev": true + }, + "node_modules/ganache-core/node_modules/dotignore": { + "version": 
"0.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "minimatch": "^3.0.4" + }, + "bin": { + "ignored": "bin/ignored" + } + }, + "node_modules/ganache-core/node_modules/duplexer3": { + "version": "0.1.4", + "dev": true, + "license": "BSD-3-Clause", + "optional": true + }, + "node_modules/ganache-core/node_modules/ecc-jsbn": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "node_modules/ganache-core/node_modules/ee-first": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/electron-to-chromium": { + "version": "1.3.636", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/elliptic": { + "version": "6.5.3", + "dev": true, + "license": "MIT", + "dependencies": { + "bn.js": "^4.4.0", + "brorand": "^1.0.1", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/encodeurl": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/encoding": { + "version": "0.1.13", + "dev": true, + "license": "MIT", + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/ganache-core/node_modules/encoding-down": { + "version": "5.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "^5.0.0", + "inherits": "^2.0.3", + "level-codec": "^9.0.0", + "level-errors": "^2.0.0", + "xtend": "^4.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/encoding-down/node_modules/abstract-leveldown": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/encoding/node_modules/iconv-lite": { + "version": "0.6.2", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/end-of-stream": { + "version": "1.4.4", + "dev": true, + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/ganache-core/node_modules/errno": { + "version": "0.1.8", + "dev": true, + "license": "MIT", + "dependencies": { + "prr": "~1.0.1" + }, + "bin": { + "errno": "cli.js" + } + }, + "node_modules/ganache-core/node_modules/es-abstract": { + "version": "1.18.0-next.1", + "dev": true, + "license": "MIT", + "dependencies": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/es-to-primitive": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/ganache-core/node_modules/es5-ext": { + "version": "0.10.53", + "dev": true, + "license": "ISC", + "dependencies": { + "es6-iterator": "~2.0.3", + "es6-symbol": "~3.1.3", + "next-tick": "~1.0.0" + } + }, + "node_modules/ganache-core/node_modules/es6-iterator": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "d": "1", + "es5-ext": "^0.10.35", + "es6-symbol": "^3.1.1" + } + }, + "node_modules/ganache-core/node_modules/es6-symbol": { + "version": "3.1.3", + "dev": true, + "license": "ISC", + "dependencies": { + "d": "^1.0.1", + "ext": "^1.1.2" + } + }, + "node_modules/ganache-core/node_modules/escape-html": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/escape-string-regexp": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/ganache-core/node_modules/esutils": { + "version": "2.0.3", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/etag": { + "version": "1.8.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/eth-block-tracker": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "eth-query": "^2.1.0", + "ethereumjs-tx": "^1.3.3", + "ethereumjs-util": "^5.1.3", + "ethjs-util": "^0.1.3", + "json-rpc-engine": "^3.6.0", + "pify": "^2.3.0", + "tape": "^4.6.3" + } + }, + "node_modules/ganache-core/node_modules/eth-block-tracker/node_modules/ethereumjs-tx": { + "version": "1.3.7", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-block-tracker/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-block-tracker/node_modules/pify": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/eth-ens-namehash": { + "version": "2.0.8", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "idna-uts46-hx": "^2.3.1", + "js-sha3": "^0.5.7" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-infura": { + "version": "3.2.1", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-fetch": "^2.1.1", + "eth-json-rpc-middleware": "^1.5.0", + "json-rpc-engine": "^3.4.0", + "json-rpc-error": "^2.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware": { + "version": "1.6.0", + "dev": true, + "license": "ISC", + "dependencies": { + "async": "^2.5.0", + "eth-query": "^2.1.2", + "eth-tx-summary": "^3.1.2", + "ethereumjs-block": "^1.6.0", + "ethereumjs-tx": "^1.3.3", + "ethereumjs-util": "^5.1.2", + "ethereumjs-vm": "^2.1.0", + "fetch-ponyfill": "^4.0.0", + "json-rpc-engine": "^3.6.0", + "json-rpc-error": "^2.0.0", + "json-stable-stringify": "^1.0.1", + "promise-to-callback": "^1.0.0", + "tape": "^4.6.3" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "license": 
"MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.6.0" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-account": { + "version": "2.0.5", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereumjs-util": "^5.0.0", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-block": { + "version": "1.7.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.0.1", + "ethereum-common": "0.2.0", + "ethereumjs-tx": "^1.2.2", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-block/node_modules/ethereum-common": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-tx": { + "version": "1.3.7", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-vm": { + "version": "2.6.0", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "ethereumjs-account": "^2.0.3", + "ethereumjs-block": "~2.2.0", + "ethereumjs-common": "^1.1.0", + "ethereumjs-util": "^6.0.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-vm/node_modules/ethereumjs-block": { + "version": "2.2.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-vm/node_modules/ethereumjs-block/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-vm/node_modules/ethereumjs-tx": { + "version": "2.1.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ethereumjs-vm/node_modules/ethereumjs-util": { + "version": "6.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "@types/bn.js": "^4.11.3", + "bn.js": 
"^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/isarray": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/level-codec": { + "version": "7.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/level-errors": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "errno": "~0.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/level-iterator-stream/node_modules/readable-stream": { + "version": "1.1.14", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/level-ws": { + "version": "0.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/level-ws/node_modules/readable-stream": { + "version": "1.0.34", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/level-ws/node_modules/xtend": { + "version": "2.1.2", + "dev": true, + "dependencies": { + "object-keys": "~0.4.0" + }, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/levelup": { + "version": "1.3.9", + "dev": true, + "license": "MIT", + "dependencies": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/ltgt": { + "version": "2.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/memdown": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/memdown/node_modules/abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + } + }, + 
"node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/merkle-patricia-tree/node_modules/async": { + "version": "1.5.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/object-keys": { + "version": "0.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/semver": { + "version": "5.4.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/ganache-core/node_modules/eth-json-rpc-middleware/node_modules/string_decoder": { + "version": "0.10.31", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-lib": { + "version": "0.1.29", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "^4.11.6", + "elliptic": "^6.4.0", + "nano-json-stream-parser": "^0.1.2", + "servify": "^0.1.12", + "ws": "^3.0.0", + "xhr-request-promise": "^0.1.2" + } + }, + "node_modules/ganache-core/node_modules/eth-query": { + "version": "2.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "json-rpc-random-id": "^1.0.0", + "xtend": "^4.0.1" + } + }, + "node_modules/ganache-core/node_modules/eth-sig-util": { + "version": "3.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "buffer": "^5.2.1", + "elliptic": "^6.4.0", + "ethereumjs-abi": "0.6.5", + "ethereumjs-util": "^5.1.1", + "tweetnacl": "^1.0.0", + "tweetnacl-util": "^0.15.0" + } + }, + "node_modules/ganache-core/node_modules/eth-sig-util/node_modules/ethereumjs-abi": { + "version": "0.6.5", + "dev": true, + "license": "MIT", + "dependencies": { + "bn.js": "^4.10.0", + "ethereumjs-util": "^4.3.0" + } + }, + "node_modules/ganache-core/node_modules/eth-sig-util/node_modules/ethereumjs-abi/node_modules/ethereumjs-util": { + "version": "4.5.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.8.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-sig-util/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary": { + "version": "3.2.4", + "dev": true, + "license": "ISC", + "dependencies": { + "async": "^2.1.2", + "clone": "^2.0.0", + "concat-stream": "^1.5.1", + "end-of-stream": "^1.1.0", + "eth-query": "^2.0.2", + "ethereumjs-block": "^1.4.1", + "ethereumjs-tx": "^1.1.1", + "ethereumjs-util": "^5.0.1", + "ethereumjs-vm": "^2.6.0", + "through2": "^2.0.3" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.6.0" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-account": { + "version": "2.0.5", + "dev": true, + 
"license": "MPL-2.0", + "dependencies": { + "ethereumjs-util": "^5.0.0", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-block": { + "version": "1.7.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.0.1", + "ethereum-common": "0.2.0", + "ethereumjs-tx": "^1.2.2", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-block/node_modules/ethereum-common": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-tx": { + "version": "1.3.7", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-vm": { + "version": "2.6.0", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "ethereumjs-account": "^2.0.3", + "ethereumjs-block": "~2.2.0", + "ethereumjs-common": "^1.1.0", + "ethereumjs-util": "^6.0.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-vm/node_modules/ethereumjs-block": { + "version": "2.2.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-vm/node_modules/ethereumjs-block/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-vm/node_modules/ethereumjs-tx": { + "version": "2.1.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ethereumjs-vm/node_modules/ethereumjs-util": { + "version": "6.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/isarray": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/level-codec": { + "version": "7.0.1", + "dev": true, + "license": "MIT" + }, + 
"node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/level-errors": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "errno": "~0.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/level-iterator-stream/node_modules/readable-stream": { + "version": "1.1.14", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/level-ws": { + "version": "0.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/level-ws/node_modules/readable-stream": { + "version": "1.0.34", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/level-ws/node_modules/xtend": { + "version": "2.1.2", + "dev": true, + "dependencies": { + "object-keys": "~0.4.0" + }, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/levelup": { + "version": "1.3.9", + "dev": true, + "license": "MIT", + "dependencies": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/ltgt": { + "version": "2.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/memdown": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/memdown/node_modules/abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/merkle-patricia-tree/node_modules/async": { + "version": "1.5.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/object-keys": { + "version": "0.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/semver": { + "version": "5.4.1", 
+ "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/ganache-core/node_modules/eth-tx-summary/node_modules/string_decoder": { + "version": "0.10.31", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethashjs": { + "version": "0.0.8", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.1.2", + "buffer-xor": "^2.0.1", + "ethereumjs-util": "^7.0.2", + "miller-rabin": "^4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethashjs/node_modules/bn.js": { + "version": "5.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethashjs/node_modules/buffer-xor": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethashjs/node_modules/ethereumjs-util": { + "version": "7.0.7", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "@types/bn.js": "^4.11.3", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereum-bloom-filters": { + "version": "1.0.7", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "js-sha3": "^0.8.0" + } + }, + "node_modules/ganache-core/node_modules/ethereum-bloom-filters/node_modules/js-sha3": { + "version": "0.8.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/ethereum-common": { + "version": "0.0.18", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereum-cryptography": { + "version": "0.1.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/pbkdf2": "^3.0.0", + "@types/secp256k1": "^4.0.1", + "blakejs": "^1.1.0", + "browserify-aes": "^1.2.0", + "bs58check": "^2.1.2", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "hash.js": "^1.1.7", + "keccak": "^3.0.0", + "pbkdf2": "^3.0.17", + "randombytes": "^2.1.0", + "safe-buffer": "^5.1.2", + "scrypt-js": "^3.0.0", + "secp256k1": "^4.0.1", + "setimmediate": "^1.0.5" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-abi": { + "version": "0.6.8", + "dev": true, + "license": "MIT", + "dependencies": { + "bn.js": "^4.11.8", + "ethereumjs-util": "^6.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-account": { + "version": "3.0.0", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereumjs-util": "^6.0.0", + "rlp": "^2.2.1", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block": { + "version": "2.2.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.6.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": 
"^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/isarray": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/level-codec": { + "version": "7.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/level-errors": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "errno": "~0.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/level-iterator-stream/node_modules/readable-stream": { + "version": "1.1.14", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/level-ws": { + "version": "0.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/level-ws/node_modules/readable-stream": { + "version": "1.0.34", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/level-ws/node_modules/xtend": { + "version": "2.1.2", + "dev": true, + "dependencies": { + "object-keys": "~0.4.0" + }, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/levelup": { + "version": "1.3.9", + "dev": true, + "license": "MIT", + "dependencies": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/ltgt": { + "version": "2.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/memdown": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/memdown/node_modules/abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + } + }, + 
"node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/merkle-patricia-tree/node_modules/async": { + "version": "1.5.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/object-keys": { + "version": "0.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/semver": { + "version": "5.4.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-block/node_modules/string_decoder": { + "version": "0.10.31", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-blockchain": { + "version": "4.0.4", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.6.1", + "ethashjs": "~0.0.7", + "ethereumjs-block": "~2.2.2", + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.1.0", + "flow-stoplight": "^1.0.0", + "level-mem": "^3.0.1", + "lru-cache": "^5.1.1", + "rlp": "^2.2.2", + "semaphore": "^1.1.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-common": { + "version": "1.5.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-tx": { + "version": "2.1.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-util": { + "version": "6.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm": { + "version": "4.2.0", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "core-js-pure": "^3.0.1", + "ethereumjs-account": "^3.0.0", + "ethereumjs-block": "^2.2.2", + "ethereumjs-blockchain": "^4.0.3", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.2", + "ethereumjs-util": "^6.2.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1", + "util.promisify": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.6.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/isarray": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/level-codec": { + "version": "7.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/level-errors": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "errno": "~0.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/level-iterator-stream": { + "version": "1.3.1", + "dev": true, 
+ "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/level-iterator-stream/node_modules/readable-stream": { + "version": "1.1.14", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/level-ws": { + "version": "0.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/level-ws/node_modules/readable-stream": { + "version": "1.0.34", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/level-ws/node_modules/xtend": { + "version": "2.1.2", + "dev": true, + "dependencies": { + "object-keys": "~0.4.0" + }, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/levelup": { + "version": "1.3.9", + "dev": true, + "license": "MIT", + "dependencies": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/ltgt": { + "version": "2.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/memdown": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/memdown/node_modules/abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/merkle-patricia-tree/node_modules/async": { + "version": "1.5.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/merkle-patricia-tree/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/object-keys": { + "version": "0.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + 
"node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/semver": { + "version": "5.4.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/ganache-core/node_modules/ethereumjs-vm/node_modules/string_decoder": { + "version": "0.10.31", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ethereumjs-wallet": { + "version": "0.6.5", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "aes-js": "^3.1.1", + "bs58check": "^2.1.2", + "ethereum-cryptography": "^0.1.3", + "ethereumjs-util": "^6.0.0", + "randombytes": "^2.0.6", + "safe-buffer": "^5.1.2", + "scryptsy": "^1.2.1", + "utf8": "^3.0.0", + "uuid": "^3.3.2" + } + }, + "node_modules/ganache-core/node_modules/ethjs-unit": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "4.11.6", + "number-to-bn": "1.7.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/ganache-core/node_modules/ethjs-unit/node_modules/bn.js": { + "version": "4.11.6", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/ethjs-util": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "is-hex-prefixed": "1.0.0", + "strip-hex-prefix": "1.0.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/ganache-core/node_modules/eventemitter3": { + "version": "4.0.4", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/events": { + "version": "3.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/ganache-core/node_modules/evp_bytestokey": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "md5.js": "^1.3.4", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets": { + "version": "2.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^2.3.3", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "posix-character-classes": "^0.1.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/define-property": { + "version": "0.2.5", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/extend-shallow": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/is-accessor-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/ganache-core/node_modules/expand-brackets/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/is-data-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/is-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/is-extendable": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/kind-of": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/expand-brackets/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/express": { + "version": "4.17.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "accepts": "~1.3.7", + "array-flatten": "1.1.1", + "body-parser": "1.19.0", + "content-disposition": "0.5.3", + "content-type": "~1.0.4", + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~1.1.2", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "~1.1.2", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.5", + "qs": "6.7.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.1.2", + "send": "0.17.1", + "serve-static": "1.14.1", + "setprototypeof": "1.1.1", + "statuses": "~1.5.0", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/ganache-core/node_modules/express/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/express/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/express/node_modules/qs": { + "version": "6.7.0", + "dev": true, + "license": "BSD-3-Clause", + "optional": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/ganache-core/node_modules/express/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/ext": { + "version": "1.4.0", + "dev": true, + "license": "ISC", + "dependencies": { + "type": "^2.0.0" + } + }, + "node_modules/ganache-core/node_modules/ext/node_modules/type": { + "version": "2.1.0", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/extend": { + "version": "3.0.2", + "dev": true, + "license": "MIT" + }, + 
"node_modules/ganache-core/node_modules/extend-shallow": { + "version": "3.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/extglob": { + "version": "2.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "array-unique": "^0.3.2", + "define-property": "^1.0.0", + "expand-brackets": "^2.1.4", + "extend-shallow": "^2.0.1", + "fragment-cache": "^0.2.1", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/extglob/node_modules/define-property": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/extglob/node_modules/extend-shallow": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/extglob/node_modules/is-extendable": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/extsprintf": { + "version": "1.3.0", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/fake-merkle-patricia-tree": { + "version": "1.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "checkpoint-store": "^1.1.0" + } + }, + "node_modules/ganache-core/node_modules/fast-deep-equal": { + "version": "3.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/fetch-ponyfill": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "node-fetch": "~1.7.1" + } + }, + "node_modules/ganache-core/node_modules/fetch-ponyfill/node_modules/is-stream": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/fetch-ponyfill/node_modules/node-fetch": { + "version": "1.7.3", + "dev": true, + "license": "MIT", + "dependencies": { + "encoding": "^0.1.11", + "is-stream": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/finalhandler": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root": { + "version": "1.2.1", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "fs-extra": "^4.0.3", + "micromatch": "^3.1.4" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/braces": { + "version": "2.3.2", + "dev": true, 
+ "license": "MIT", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/fill-range": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/fs-extra": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/is-extendable": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/is-number": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/micromatch": { + "version": "3.1.10", + "dev": true, + "license": "MIT", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/find-yarn-workspace-root/node_modules/to-regex-range": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/flow-stoplight": { + "version": "1.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/for-each": { + "version": "0.3.3", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/ganache-core/node_modules/for-in": { + 
"version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/forever-agent": { + "version": "0.6.1", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/form-data": { + "version": "2.3.3", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/ganache-core/node_modules/forwarded": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/fragment-cache": { + "version": "0.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "map-cache": "^0.2.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/fresh": { + "version": "0.5.2", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/fs-extra": { + "version": "7.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "engines": { + "node": ">=6 <7 || >=8" + } + }, + "node_modules/ganache-core/node_modules/fs.realpath": { + "version": "1.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/function-bind": { + "version": "1.1.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/functional-red-black-tree": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/get-intrinsic": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/get-stream": { + "version": "5.2.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ganache-core/node_modules/get-value": { + "version": "2.0.6", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/getpass": { + "version": "0.1.7", + "dev": true, + "license": "MIT", + "dependencies": { + "assert-plus": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/glob": { + "version": "7.1.3", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/global": { + "version": "4.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "min-document": "^2.19.0", + "process": "^0.11.10" + } + }, + "node_modules/ganache-core/node_modules/got": { + "version": "9.6.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@sindresorhus/is": "^0.14.0", + "@szmarczak/http-timer": "^1.1.2", + "cacheable-request": "^6.0.0", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^4.1.0", + "lowercase-keys": "^1.0.1", + "mimic-response": "^1.0.1", + "p-cancelable": 
"^1.0.0", + "to-readable-stream": "^1.0.0", + "url-parse-lax": "^3.0.0" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/ganache-core/node_modules/got/node_modules/get-stream": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/graceful-fs": { + "version": "4.2.4", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/har-schema": { + "version": "2.0.0", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/har-validator": { + "version": "5.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/has": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/ganache-core/node_modules/has-ansi": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/has-ansi/node_modules/ansi-regex": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/has-flag": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/has-symbol-support-x": { + "version": "1.4.2", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/has-symbols": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/has-to-string-tag-x": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "has-symbol-support-x": "^1.4.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/has-value": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "get-value": "^2.0.6", + "has-values": "^1.0.0", + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/has-values": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^3.0.0", + "kind-of": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/has-values/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/has-values/node_modules/is-number": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/has-values/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/has-values/node_modules/kind-of": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { 
+ "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/hash-base": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/hash-base/node_modules/readable-stream": { + "version": "3.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ganache-core/node_modules/hash.js": { + "version": "1.1.7", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/heap": { + "version": "0.2.6", + "dev": true + }, + "node_modules/ganache-core/node_modules/hmac-drbg": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/home-or-tmp": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/http-cache-semantics": { + "version": "4.1.0", + "dev": true, + "license": "BSD-2-Clause", + "optional": true + }, + "node_modules/ganache-core/node_modules/http-errors": { + "version": "1.7.2", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/http-errors/node_modules/inherits": { + "version": "2.0.3", + "dev": true, + "license": "ISC", + "optional": true + }, + "node_modules/ganache-core/node_modules/http-https": { + "version": "1.0.0", + "dev": true, + "license": "ISC", + "optional": true + }, + "node_modules/ganache-core/node_modules/http-signature": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + }, + "engines": { + "node": ">=0.8", + "npm": ">=1.3.7" + } + }, + "node_modules/ganache-core/node_modules/iconv-lite": { + "version": "0.4.24", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/idna-uts46-hx": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "punycode": "2.1.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/ganache-core/node_modules/idna-uts46-hx/node_modules/punycode": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/ieee754": { + "version": "1.2.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/ganache-core/node_modules/immediate": { + "version": "3.2.3", + "dev": true, + "license": "MIT" + }, + 
"node_modules/ganache-core/node_modules/inflight": { + "version": "1.0.6", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/ganache-core/node_modules/inherits": { + "version": "2.0.4", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/invariant": { + "version": "2.2.4", + "dev": true, + "license": "MIT", + "dependencies": { + "loose-envify": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/ipaddr.js": { + "version": "1.9.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/ganache-core/node_modules/is-accessor-descriptor": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/is-arguments": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/is-callable": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/is-ci": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ci-info": "^2.0.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/ganache-core/node_modules/is-data-descriptor": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/is-date-object": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/is-descriptor": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/is-extendable": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/is-finite": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ganache-core/node_modules/is-fn": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/is-function": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/is-hex-prefixed": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/ganache-core/node_modules/is-negative-zero": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/is-object": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "optional": true, + "funding": { + 
"url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/is-plain-obj": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/is-plain-object": { + "version": "2.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/is-regex": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/is-retry-allowed": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/is-symbol": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/is-typedarray": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/is-windows": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/isarray": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/isexe": { + "version": "2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/isobject": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/isstream": { + "version": "0.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/isurl": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "has-to-string-tag-x": "^1.2.0", + "is-object": "^1.0.1" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/ganache-core/node_modules/js-sha3": { + "version": "0.5.7", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/js-tokens": { + "version": "4.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/jsbn": { + "version": "0.1.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/json-buffer": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/json-rpc-engine": { + "version": "3.8.0", + "dev": true, + "license": "ISC", + "dependencies": { + "async": "^2.0.1", + "babel-preset-env": "^1.7.0", + "babelify": "^7.3.0", + "json-rpc-error": "^2.0.0", + "promise-to-callback": "^1.0.0", + "safe-event-emitter": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/json-rpc-error": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1" + } + }, + "node_modules/ganache-core/node_modules/json-rpc-random-id": { + "version": "1.0.1", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/json-schema": { + "version": "0.2.3", + "dev": true + }, + "node_modules/ganache-core/node_modules/json-schema-traverse": { + "version": "0.4.1", + "dev": true, + "license": "MIT" 
+ }, + "node_modules/ganache-core/node_modules/json-stable-stringify": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "jsonify": "~0.0.0" + } + }, + "node_modules/ganache-core/node_modules/json-stringify-safe": { + "version": "5.0.1", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/jsonfile": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/ganache-core/node_modules/jsonify": { + "version": "0.0.0", + "dev": true, + "license": "Public Domain" + }, + "node_modules/ganache-core/node_modules/jsprim": { + "version": "1.4.1", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "license": "MIT", + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } + }, + "node_modules/ganache-core/node_modules/keccak": { + "version": "3.0.1", + "dev": true, + "hasInstallScript": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/ganache-core/node_modules/keyv": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "json-buffer": "3.0.0" + } + }, + "node_modules/ganache-core/node_modules/kind-of": { + "version": "6.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/klaw-sync": { + "version": "6.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.1.11" + } + }, + "node_modules/ganache-core/node_modules/level-codec": { + "version": "9.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer": "^5.6.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/level-errors": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "errno": "~0.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/level-iterator-stream": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.5", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/level-mem": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "level-packager": "~4.0.0", + "memdown": "~3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/level-mem/node_modules/abstract-leveldown": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/level-mem/node_modules/ltgt": { + "version": "2.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/level-mem/node_modules/memdown": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~5.0.0", + "functional-red-black-tree": "~1.0.1", + "immediate": "~3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/level-mem/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/level-packager": { + "version": "4.0.1", + "dev": true, + 
"license": "MIT", + "dependencies": { + "encoding-down": "~5.0.0", + "levelup": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/level-post": { + "version": "1.0.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ltgt": "^2.1.2" + } + }, + "node_modules/ganache-core/node_modules/level-sublevel": { + "version": "6.6.4", + "dev": true, + "license": "MIT", + "dependencies": { + "bytewise": "~1.1.0", + "level-codec": "^9.0.0", + "level-errors": "^2.0.0", + "level-iterator-stream": "^2.0.3", + "ltgt": "~2.1.1", + "pull-defer": "^0.2.2", + "pull-level": "^2.0.3", + "pull-stream": "^3.6.8", + "typewiselite": "~1.0.0", + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/level-ws": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "readable-stream": "^2.2.8", + "xtend": "^4.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/levelup": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "deferred-leveldown": "~4.0.0", + "level-errors": "~2.0.0", + "level-iterator-stream": "~3.0.0", + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/levelup/node_modules/level-iterator-stream": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "readable-stream": "^2.3.6", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/lodash": { + "version": "4.17.20", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/looper": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/loose-envify": { + "version": "1.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/ganache-core/node_modules/lowercase-keys": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/lru-cache": { + "version": "5.1.1", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/ganache-core/node_modules/ltgt": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/map-cache": { + "version": "0.2.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/map-visit": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "object-visit": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/md5.js": { + "version": "1.3.5", + "dev": true, + "license": "MIT", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/ganache-core/node_modules/media-typer": { + "version": "0.3.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/merge-descriptors": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/merkle-patricia-tree": { + "version": "3.0.0", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.6.1", + "ethereumjs-util": "^5.2.0", 
+ "level-mem": "^3.0.1", + "level-ws": "^1.0.0", + "readable-stream": "^3.0.6", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + } + }, + "node_modules/ganache-core/node_modules/merkle-patricia-tree/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/merkle-patricia-tree/node_modules/readable-stream": { + "version": "3.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ganache-core/node_modules/methods": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/miller-rabin": { + "version": "4.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "bn.js": "^4.0.0", + "brorand": "^1.0.1" + }, + "bin": { + "miller-rabin": "bin/miller-rabin" + } + }, + "node_modules/ganache-core/node_modules/mime": { + "version": "1.6.0", + "dev": true, + "license": "MIT", + "optional": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/mime-db": { + "version": "1.45.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/mime-types": { + "version": "2.1.28", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.45.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/mimic-response": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/min-document": { + "version": "2.19.0", + "dev": true, + "dependencies": { + "dom-walk": "^0.1.0" + } + }, + "node_modules/ganache-core/node_modules/minimalistic-assert": { + "version": "1.0.1", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/minimalistic-crypto-utils": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/minimatch": { + "version": "3.0.4", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/minimist": { + "version": "1.2.5", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/minizlib": { + "version": "1.3.3", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "minipass": "^2.9.0" + } + }, + "node_modules/ganache-core/node_modules/minizlib/node_modules/minipass": { + "version": "2.9.0", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + }, + "node_modules/ganache-core/node_modules/mixin-deep": { + "version": "1.3.2", + "dev": true, + "license": "MIT", + "dependencies": { + "for-in": "^1.0.2", + "is-extendable": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/mkdirp": { + "version": "0.5.5", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + 
"node_modules/ganache-core/node_modules/mkdirp-promise": { + "version": "5.0.1", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "mkdirp": "*" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/mock-fs": { + "version": "4.13.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/ms": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/multibase": { + "version": "0.6.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "base-x": "^3.0.8", + "buffer": "^5.5.0" + } + }, + "node_modules/ganache-core/node_modules/multicodec": { + "version": "0.5.7", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "varint": "^5.0.0" + } + }, + "node_modules/ganache-core/node_modules/multihashes": { + "version": "0.4.21", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "buffer": "^5.5.0", + "multibase": "^0.7.0", + "varint": "^5.0.0" + } + }, + "node_modules/ganache-core/node_modules/multihashes/node_modules/multibase": { + "version": "0.7.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "base-x": "^3.0.8", + "buffer": "^5.5.0" + } + }, + "node_modules/ganache-core/node_modules/nano-json-stream-parser": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/nanomatch": { + "version": "1.2.13", + "dev": true, + "license": "MIT", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "fragment-cache": "^0.2.1", + "is-windows": "^1.0.2", + "kind-of": "^6.0.2", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/negotiator": { + "version": "0.6.2", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/next-tick": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/nice-try": { + "version": "1.0.5", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/node-addon-api": { + "version": "2.0.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/node-fetch": { + "version": "2.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": "4.x || >=6.0.0" + } + }, + "node_modules/ganache-core/node_modules/node-gyp-build": { + "version": "4.2.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" + } + }, + "node_modules/ganache-core/node_modules/normalize-url": { + "version": "4.5.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ganache-core/node_modules/number-to-bn": { + "version": "1.7.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "4.11.6", + "strip-hex-prefix": "1.0.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/ganache-core/node_modules/number-to-bn/node_modules/bn.js": { + "version": "4.11.6", + "dev": true, + "license": "MIT", + "optional": true + }, + 
"node_modules/ganache-core/node_modules/oauth-sign": { + "version": "0.9.0", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/object-assign": { + "version": "4.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object-copy": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "copy-descriptor": "^0.1.0", + "define-property": "^0.2.5", + "kind-of": "^3.0.3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object-copy/node_modules/define-property": { + "version": "0.2.5", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object-copy/node_modules/is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object-copy/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/object-copy/node_modules/is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object-copy/node_modules/is-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object-copy/node_modules/is-descriptor/node_modules/kind-of": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object-copy/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object-inspect": { + "version": "1.9.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/object-is": { + "version": "1.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/object-keys": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ganache-core/node_modules/object-visit": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/object.assign": { + "version": "4.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/object.getownpropertydescriptors": { + "version": "2.1.1", + "dev": true, + "license": "MIT", 
+ "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/object.pick": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/oboe": { + "version": "2.1.4", + "dev": true, + "license": "BSD", + "optional": true, + "dependencies": { + "http-https": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/on-finished": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/once": { + "version": "1.4.0", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/ganache-core/node_modules/os-homedir": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/os-tmpdir": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/p-cancelable": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/p-timeout": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "p-finally": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/p-timeout/node_modules/p-finally": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/parse-asn1": { + "version": "5.1.6", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "asn1.js": "^5.2.0", + "browserify-aes": "^1.0.0", + "evp_bytestokey": "^1.0.0", + "pbkdf2": "^3.0.3", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/parse-headers": { + "version": "2.0.3", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/parseurl": { + "version": "1.3.3", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/pascalcase": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/patch-package": { + "version": "6.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "@yarnpkg/lockfile": "^1.1.0", + "chalk": "^2.4.2", + "cross-spawn": "^6.0.5", + "find-yarn-workspace-root": "^1.2.1", + "fs-extra": "^7.0.1", + "is-ci": "^2.0.0", + "klaw-sync": "^6.0.0", + "minimist": "^1.2.0", + "rimraf": "^2.6.3", + "semver": "^5.6.0", + "slash": "^2.0.0", + "tmp": "^0.0.33" + }, + "bin": { + "patch-package": "index.js" + }, + "engines": { + "npm": ">5" + } + }, + "node_modules/ganache-core/node_modules/patch-package/node_modules/cross-spawn": { + "version": "6.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + 
"node_modules/ganache-core/node_modules/patch-package/node_modules/path-key": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/patch-package/node_modules/semver": { + "version": "5.7.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/ganache-core/node_modules/patch-package/node_modules/shebang-command": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/patch-package/node_modules/shebang-regex": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/patch-package/node_modules/slash": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/patch-package/node_modules/tmp": { + "version": "0.0.33", + "dev": true, + "license": "MIT", + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/ganache-core/node_modules/patch-package/node_modules/which": { + "version": "1.3.1", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/ganache-core/node_modules/path-is-absolute": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/path-parse": { + "version": "1.0.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/path-to-regexp": { + "version": "0.1.7", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/pbkdf2": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/ganache-core/node_modules/performance-now": { + "version": "2.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/posix-character-classes": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/precond": { + "version": "0.2.3", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/prepend-http": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/private": { + "version": "0.1.8", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/process": { + "version": "0.11.10", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/ganache-core/node_modules/process-nextick-args": { + "version": "2.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/promise-to-callback": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "is-fn": "^1.0.0", + "set-immediate-shim": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/proxy-addr": { + "version": "2.0.6", + "dev": true, + "license": 
"MIT", + "optional": true, + "dependencies": { + "forwarded": "~0.1.2", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/ganache-core/node_modules/prr": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/pseudomap": { + "version": "1.0.2", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/psl": { + "version": "1.8.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/public-encrypt": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "^4.1.0", + "browserify-rsa": "^4.0.0", + "create-hash": "^1.1.0", + "parse-asn1": "^5.0.0", + "randombytes": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/ganache-core/node_modules/pull-cat": { + "version": "1.1.11", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/pull-defer": { + "version": "0.2.3", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/pull-level": { + "version": "2.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "level-post": "^1.0.7", + "pull-cat": "^1.1.9", + "pull-live": "^1.0.1", + "pull-pushable": "^2.0.0", + "pull-stream": "^3.4.0", + "pull-window": "^2.1.4", + "stream-to-pull-stream": "^1.7.1" + } + }, + "node_modules/ganache-core/node_modules/pull-live": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "pull-cat": "^1.1.9", + "pull-stream": "^3.4.0" + } + }, + "node_modules/ganache-core/node_modules/pull-pushable": { + "version": "2.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/pull-stream": { + "version": "3.6.14", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/pull-window": { + "version": "2.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "looper": "^2.0.0" + } + }, + "node_modules/ganache-core/node_modules/pump": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/ganache-core/node_modules/punycode": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/qs": { + "version": "6.5.2", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/ganache-core/node_modules/query-string": { + "version": "5.1.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "decode-uri-component": "^0.2.0", + "object-assign": "^4.1.0", + "strict-uri-encode": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/randombytes": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/ganache-core/node_modules/randomfill": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "randombytes": "^2.0.5", + "safe-buffer": "^5.1.0" + } + }, + "node_modules/ganache-core/node_modules/range-parser": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/raw-body": { + "version": "2.4.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bytes": "3.1.0", + "http-errors": 
"1.7.2", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/readable-stream": { + "version": "2.3.7", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/ganache-core/node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/regenerate": { + "version": "1.4.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/regenerator-runtime": { + "version": "0.11.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/regenerator-transform": { + "version": "0.10.1", + "dev": true, + "license": "BSD", + "dependencies": { + "babel-runtime": "^6.18.0", + "babel-types": "^6.19.0", + "private": "^0.1.6" + } + }, + "node_modules/ganache-core/node_modules/regex-not": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "extend-shallow": "^3.0.2", + "safe-regex": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/regexp.prototype.flags": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/regexp.prototype.flags/node_modules/es-abstract": { + "version": "1.17.7", + "dev": true, + "license": "MIT", + "dependencies": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/regexpu-core": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "regenerate": "^1.2.1", + "regjsgen": "^0.2.0", + "regjsparser": "^0.1.4" + } + }, + "node_modules/ganache-core/node_modules/regjsgen": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/regjsparser": { + "version": "0.1.5", + "dev": true, + "license": "BSD", + "dependencies": { + "jsesc": "~0.5.0" + }, + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/ganache-core/node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + } + }, + "node_modules/ganache-core/node_modules/repeat-element": { + "version": "1.1.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/repeat-string": { + "version": "1.6.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/ganache-core/node_modules/repeating": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-finite": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/request": { + "version": 
"2.88.2", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ganache-core/node_modules/resolve-url": { + "version": "0.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/responselike": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "lowercase-keys": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/resumer": { + "version": "0.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "through": "~2.3.4" + } + }, + "node_modules/ganache-core/node_modules/ret": { + "version": "0.1.15", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12" + } + }, + "node_modules/ganache-core/node_modules/rimraf": { + "version": "2.6.3", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/ganache-core/node_modules/ripemd160": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "node_modules/ganache-core/node_modules/rlp": { + "version": "2.2.6", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.1" + }, + "bin": { + "rlp": "bin/rlp" + } + }, + "node_modules/ganache-core/node_modules/rustbn.js": { + "version": "0.2.0", + "dev": true, + "license": "(MIT OR Apache-2.0)" + }, + "node_modules/ganache-core/node_modules/safe-buffer": { + "version": "5.2.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/safe-event-emitter": { + "version": "1.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "events": "^3.0.0" + } + }, + "node_modules/ganache-core/node_modules/safe-regex": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ret": "~0.1.10" + } + }, + "node_modules/ganache-core/node_modules/safer-buffer": { + "version": "2.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/scrypt-js": { + "version": "3.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/scryptsy": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "pbkdf2": "^3.0.3" + } + }, + "node_modules/ganache-core/node_modules/secp256k1": { + "version": "4.0.2", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "elliptic": "^6.5.2", + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/ganache-core/node_modules/seedrandom": { + "version": "3.0.1", + "dev": true, + "license": "MIT" + }, + 
"node_modules/ganache-core/node_modules/semaphore": { + "version": "1.1.0", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/ganache-core/node_modules/send": { + "version": "0.17.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "debug": "2.6.9", + "depd": "~1.1.2", + "destroy": "~1.0.4", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "~1.7.2", + "mime": "1.6.0", + "ms": "2.1.1", + "on-finished": "~2.3.0", + "range-parser": "~1.2.1", + "statuses": "~1.5.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ganache-core/node_modules/send/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/send/node_modules/ms": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/serve-static": { + "version": "1.14.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.17.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ganache-core/node_modules/servify": { + "version": "0.1.12", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "body-parser": "^1.16.0", + "cors": "^2.8.1", + "express": "^4.14.0", + "request": "^2.79.0", + "xhr": "^2.3.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/set-immediate-shim": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/set-value": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-extendable": "^0.1.1", + "is-plain-object": "^2.0.3", + "split-string": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/set-value/node_modules/extend-shallow": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/set-value/node_modules/is-extendable": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/setimmediate": { + "version": "1.0.5", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/setprototypeof": { + "version": "1.1.1", + "dev": true, + "license": "ISC", + "optional": true + }, + "node_modules/ganache-core/node_modules/sha.js": { + "version": "2.4.11", + "dev": true, + "license": "(MIT AND BSD-3-Clause)", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, + "node_modules/ganache-core/node_modules/simple-concat": { + "version": "1.0.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "optional": true + }, + 
"node_modules/ganache-core/node_modules/simple-get": { + "version": "2.8.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "decompress-response": "^3.3.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon": { + "version": "0.8.2", + "dev": true, + "license": "MIT", + "dependencies": { + "base": "^0.11.1", + "debug": "^2.2.0", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "map-cache": "^0.2.2", + "source-map": "^0.5.6", + "source-map-resolve": "^0.5.0", + "use": "^3.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon-node": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "define-property": "^1.0.0", + "isobject": "^3.0.0", + "snapdragon-util": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon-node/node_modules/define-property": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon-util": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.2.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon-util/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/snapdragon-util/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/define-property": { + "version": "0.2.5", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/extend-shallow": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/is-accessor-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/is-data-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/ganache-core/node_modules/snapdragon/node_modules/is-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/is-extendable": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/kind-of": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/snapdragon/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/source-map": { + "version": "0.5.7", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/source-map-resolve": { + "version": "0.5.3", + "dev": true, + "license": "MIT", + "dependencies": { + "atob": "^2.1.2", + "decode-uri-component": "^0.2.0", + "resolve-url": "^0.2.1", + "source-map-url": "^0.4.0", + "urix": "^0.1.0" + } + }, + "node_modules/ganache-core/node_modules/source-map-support": { + "version": "0.5.12", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/ganache-core/node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/source-map-url": { + "version": "0.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/split-string": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "extend-shallow": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/sshpk": { + "version": "1.16.1", + "dev": true, + "license": "MIT", + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/sshpk/node_modules/tweetnacl": { + "version": "0.14.5", + "dev": true, + "license": "Unlicense" + }, + "node_modules/ganache-core/node_modules/static-extend": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "define-property": "^0.2.5", + "object-copy": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/static-extend/node_modules/define-property": { + "version": "0.2.5", + "dev": true, + "license": "MIT", + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/static-extend/node_modules/is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/static-extend/node_modules/is-accessor-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/ganache-core/node_modules/static-extend/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/static-extend/node_modules/is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/static-extend/node_modules/is-data-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/static-extend/node_modules/is-descriptor": { + "version": "0.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/static-extend/node_modules/kind-of": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/statuses": { + "version": "1.5.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/stream-to-pull-stream": { + "version": "1.7.3", + "dev": true, + "license": "MIT", + "dependencies": { + "looper": "^3.0.0", + "pull-stream": "^3.2.3" + } + }, + "node_modules/ganache-core/node_modules/stream-to-pull-stream/node_modules/looper": { + "version": "3.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/strict-uri-encode": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/string_decoder": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/ganache-core/node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/string.prototype.trim": { + "version": "1.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/string.prototype.trimend": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/string.prototype.trimstart": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/strip-hex-prefix": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "is-hex-prefixed": "1.0.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/ganache-core/node_modules/supports-color": { + "version": "5.5.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + 
"node_modules/ganache-core/node_modules/swarm-js": { + "version": "0.1.40", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bluebird": "^3.5.0", + "buffer": "^5.0.5", + "eth-lib": "^0.1.26", + "fs-extra": "^4.0.2", + "got": "^7.1.0", + "mime-types": "^2.1.16", + "mkdirp-promise": "^5.0.1", + "mock-fs": "^4.1.0", + "setimmediate": "^1.0.5", + "tar": "^4.0.2", + "xhr-request": "^1.0.1" + } + }, + "node_modules/ganache-core/node_modules/swarm-js/node_modules/fs-extra": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "node_modules/ganache-core/node_modules/swarm-js/node_modules/get-stream": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/swarm-js/node_modules/got": { + "version": "7.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "decompress-response": "^3.2.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "is-plain-obj": "^1.1.0", + "is-retry-allowed": "^1.0.0", + "is-stream": "^1.0.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "p-cancelable": "^0.3.0", + "p-timeout": "^1.1.1", + "safe-buffer": "^5.0.1", + "timed-out": "^4.0.0", + "url-parse-lax": "^1.0.0", + "url-to-options": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/swarm-js/node_modules/is-stream": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/swarm-js/node_modules/p-cancelable": { + "version": "0.3.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/swarm-js/node_modules/prepend-http": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/swarm-js/node_modules/url-parse-lax": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "prepend-http": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/tape": { + "version": "4.13.3", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-equal": "~1.1.1", + "defined": "~1.0.0", + "dotignore": "~0.1.2", + "for-each": "~0.3.3", + "function-bind": "~1.1.1", + "glob": "~7.1.6", + "has": "~1.0.3", + "inherits": "~2.0.4", + "is-regex": "~1.0.5", + "minimist": "~1.2.5", + "object-inspect": "~1.7.0", + "resolve": "~1.17.0", + "resumer": "~0.0.0", + "string.prototype.trim": "~1.2.1", + "through": "~2.3.8" + }, + "bin": { + "tape": "bin/tape" + } + }, + "node_modules/ganache-core/node_modules/tape/node_modules/glob": { + "version": "7.1.6", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/ganache-core/node_modules/tape/node_modules/is-regex": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "has": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/tape/node_modules/object-inspect": { + "version": "1.7.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/tape/node_modules/resolve": { + "version": "1.17.0", + "dev": true, + "license": "MIT", + "dependencies": { + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/tar": { + "version": "4.4.13", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.8.6", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + "safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + }, + "engines": { + "node": ">=4.5" + } + }, + "node_modules/ganache-core/node_modules/tar/node_modules/fs-minipass": { + "version": "1.2.7", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "minipass": "^2.6.0" + } + }, + "node_modules/ganache-core/node_modules/tar/node_modules/minipass": { + "version": "2.9.0", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + }, + "node_modules/ganache-core/node_modules/through": { + "version": "2.3.8", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/through2": { + "version": "2.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "node_modules/ganache-core/node_modules/timed-out": { + "version": "4.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/tmp": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "rimraf": "^2.6.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/to-object-path": { + "version": "0.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/to-object-path/node_modules/is-buffer": { + "version": "1.1.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/to-object-path/node_modules/kind-of": { + "version": "3.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/to-readable-stream": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ganache-core/node_modules/to-regex": { + "version": "3.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "regex-not": "^1.0.2", + "safe-regex": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/toidentifier": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/ganache-core/node_modules/tough-cookie": { + "version": "2.5.0", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/ganache-core/node_modules/trim-right": { + "version": "1.0.1", + "dev": true, + 
"license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/tunnel-agent": { + "version": "0.6.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ganache-core/node_modules/tweetnacl": { + "version": "1.0.3", + "dev": true, + "license": "Unlicense" + }, + "node_modules/ganache-core/node_modules/tweetnacl-util": { + "version": "0.15.1", + "dev": true, + "license": "Unlicense" + }, + "node_modules/ganache-core/node_modules/type": { + "version": "1.2.0", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/type-is": { + "version": "1.6.18", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ganache-core/node_modules/typedarray": { + "version": "0.0.6", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/typedarray-to-buffer": { + "version": "3.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "is-typedarray": "^1.0.0" + } + }, + "node_modules/ganache-core/node_modules/typewise": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "typewise-core": "^1.2.0" + } + }, + "node_modules/ganache-core/node_modules/typewise-core": { + "version": "1.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/typewiselite": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/ultron": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/underscore": { + "version": "1.9.1", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/union-value": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "arr-union": "^3.1.0", + "get-value": "^2.0.6", + "is-extendable": "^0.1.1", + "set-value": "^2.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/union-value/node_modules/is-extendable": { + "version": "0.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/universalify": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/ganache-core/node_modules/unorm": { + "version": "1.6.0", + "dev": true, + "license": "MIT or GPL-2.0", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/ganache-core/node_modules/unpipe": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/unset-value": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-value": "^0.3.1", + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/unset-value/node_modules/has-value": { + "version": "0.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "get-value": "^2.0.3", + "has-values": "^0.1.4", + "isobject": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/unset-value/node_modules/has-value/node_modules/isobject": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "isarray": 
"1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/unset-value/node_modules/has-values": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/uri-js": { + "version": "4.4.1", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/ganache-core/node_modules/urix": { + "version": "0.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/url-parse-lax": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "prepend-http": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ganache-core/node_modules/url-set-query": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/url-to-options": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/ganache-core/node_modules/use": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ganache-core/node_modules/utf-8-validate": { + "version": "5.0.4", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "node-gyp-build": "^4.2.0" + } + }, + "node_modules/ganache-core/node_modules/utf8": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/util-deprecate": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/util.promisify": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "for-each": "^0.3.3", + "has-symbols": "^1.0.1", + "object.getownpropertydescriptors": "^2.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ganache-core/node_modules/utils-merge": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/ganache-core/node_modules/uuid": { + "version": "3.4.0", + "dev": true, + "license": "MIT", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/ganache-core/node_modules/varint": { + "version": "5.0.2", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/vary": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ganache-core/node_modules/verror": { + "version": "1.10.0", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "license": "MIT", + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "node_modules/ganache-core/node_modules/web3": { + "version": "1.2.11", + "dev": true, + "hasInstallScript": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "web3-bzz": "1.2.11", + "web3-core": "1.2.11", + "web3-eth": "1.2.11", + "web3-eth-personal": "1.2.11", + "web3-net": "1.2.11", + "web3-shh": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-bzz": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "@types/node": "^12.12.6", + "got": 
"9.6.0", + "swarm-js": "^0.1.40", + "underscore": "1.9.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-bzz/node_modules/@types/node": { + "version": "12.19.12", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/web3-core": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "@types/bn.js": "^4.11.5", + "@types/node": "^12.12.6", + "bignumber.js": "^9.0.0", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-core-requestmanager": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-core-helpers": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "underscore": "1.9.1", + "web3-eth-iban": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-core-method": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "@ethersproject/transactions": "^5.0.0-beta.135", + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11", + "web3-core-promievent": "1.2.11", + "web3-core-subscriptions": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-core-promievent": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "eventemitter3": "4.0.4" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-core-requestmanager": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11", + "web3-providers-http": "1.2.11", + "web3-providers-ipc": "1.2.11", + "web3-providers-ws": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-core-subscriptions": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "eventemitter3": "4.0.4", + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-core/node_modules/@types/node": { + "version": "12.19.12", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/web3-eth": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "underscore": "1.9.1", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-core-subscriptions": "1.2.11", + "web3-eth-abi": "1.2.11", + "web3-eth-accounts": "1.2.11", + "web3-eth-contract": "1.2.11", + "web3-eth-ens": "1.2.11", + "web3-eth-iban": "1.2.11", + "web3-eth-personal": "1.2.11", + "web3-net": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-abi": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "@ethersproject/abi": "5.0.0-beta.153", + "underscore": "1.9.1", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-accounts": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": 
true, + "dependencies": { + "crypto-browserify": "3.12.0", + "eth-lib": "0.2.8", + "ethereumjs-common": "^1.3.2", + "ethereumjs-tx": "^2.1.1", + "scrypt-js": "^3.0.1", + "underscore": "1.9.1", + "uuid": "3.3.2", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-accounts/node_modules/eth-lib": { + "version": "0.2.8", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "^4.11.6", + "elliptic": "^6.4.0", + "xhr-request-promise": "^0.1.2" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-accounts/node_modules/uuid": { + "version": "3.3.2", + "dev": true, + "license": "MIT", + "optional": true, + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-contract": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "@types/bn.js": "^4.11.5", + "underscore": "1.9.1", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-core-promievent": "1.2.11", + "web3-core-subscriptions": "1.2.11", + "web3-eth-abi": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-ens": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "content-hash": "^2.5.2", + "eth-ens-namehash": "2.0.8", + "underscore": "1.9.1", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-promievent": "1.2.11", + "web3-eth-abi": "1.2.11", + "web3-eth-contract": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-iban": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "bn.js": "^4.11.9", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-personal": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "@types/node": "^12.12.6", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-net": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-eth-personal/node_modules/@types/node": { + "version": "12.19.12", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/web3-net": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "web3-core": "1.2.11", + "web3-core-method": "1.2.11", + "web3-utils": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine": { + "version": "14.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "async": "^2.5.0", + "backoff": "^2.5.0", + "clone": "^2.0.0", + "cross-fetch": "^2.1.0", + "eth-block-tracker": "^3.0.0", + "eth-json-rpc-infura": "^3.1.0", + "eth-sig-util": "3.0.0", + "ethereumjs-block": "^1.2.2", + "ethereumjs-tx": "^1.2.0", + "ethereumjs-util": "^5.1.5", + "ethereumjs-vm": "^2.3.4", + "json-rpc-error": "^2.0.0", + "json-stable-stringify": "^1.0.1", + "promise-to-callback": "^1.0.0", + "readable-stream": "^2.2.9", + "request": "^2.85.0", + "semaphore": "^1.0.3", + 
"ws": "^5.1.1", + "xhr": "^2.2.0", + "xtend": "^4.0.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.6.0" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/eth-sig-util": { + "version": "1.4.2", + "dev": true, + "license": "ISC", + "dependencies": { + "ethereumjs-abi": "git+https://github.com/ethereumjs/ethereumjs-abi.git", + "ethereumjs-util": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-account": { + "version": "2.0.5", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereumjs-util": "^5.0.0", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-block": { + "version": "1.7.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.0.1", + "ethereum-common": "0.2.0", + "ethereumjs-tx": "^1.2.2", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-block/node_modules/ethereum-common": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-tx": { + "version": "1.3.7", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-vm": { + "version": "2.6.0", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "ethereumjs-account": "^2.0.3", + "ethereumjs-block": "~2.2.0", + "ethereumjs-common": "^1.1.0", + "ethereumjs-util": "^6.0.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-vm/node_modules/ethereumjs-block": { + "version": "2.2.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-vm/node_modules/ethereumjs-block/node_modules/ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + 
"node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-vm/node_modules/ethereumjs-tx": { + "version": "2.1.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ethereumjs-vm/node_modules/ethereumjs-util": { + "version": "6.2.1", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/isarray": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/level-codec": { + "version": "7.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/level-errors": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "errno": "~0.1.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/level-iterator-stream/node_modules/readable-stream": { + "version": "1.1.14", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/level-ws": { + "version": "0.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/level-ws/node_modules/readable-stream": { + "version": "1.0.34", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/level-ws/node_modules/xtend": { + "version": "2.1.2", + "dev": true, + "dependencies": { + "object-keys": "~0.4.0" + }, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/levelup": { + "version": "1.3.9", + "dev": true, + "license": "MIT", + "dependencies": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ltgt": { + "version": "2.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/memdown": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/memdown/node_modules/abstract-leveldown": { + "version": "2.7.2", + 
"dev": true, + "license": "MIT", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/merkle-patricia-tree/node_modules/async": { + "version": "1.5.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/object-keys": { + "version": "0.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/semver": { + "version": "5.4.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/string_decoder": { + "version": "0.10.31", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/web3-provider-engine/node_modules/ws": { + "version": "5.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "async-limiter": "~1.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-providers-http": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "web3-core-helpers": "1.2.11", + "xhr2-cookies": "1.1.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-providers-ipc": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "oboe": "2.1.4", + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-providers-ws": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "eventemitter3": "4.0.4", + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11", + "websocket": "^1.0.31" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-shh": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "web3-core": "1.2.11", + "web3-core-method": "1.2.11", + "web3-core-subscriptions": "1.2.11", + "web3-net": "1.2.11" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-utils": { + "version": "1.2.11", + "dev": true, + "license": "LGPL-3.0", + "optional": true, + "dependencies": { + "bn.js": "^4.11.9", + "eth-lib": "0.2.8", + "ethereum-bloom-filters": "^1.0.6", + "ethjs-unit": "0.1.6", + "number-to-bn": "1.7.0", + "randombytes": "^2.1.0", + "underscore": "1.9.1", + "utf8": "3.0.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ganache-core/node_modules/web3-utils/node_modules/eth-lib": { + "version": "0.2.8", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bn.js": "^4.11.6", + "elliptic": "^6.4.0", + "xhr-request-promise": "^0.1.2" + } + }, + "node_modules/ganache-core/node_modules/websocket": { + "version": "1.0.32", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + 
"bufferutil": "^4.0.1", + "debug": "^2.2.0", + "es5-ext": "^0.10.50", + "typedarray-to-buffer": "^3.1.5", + "utf-8-validate": "^5.0.2", + "yaeti": "^0.0.6" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/ganache-core/node_modules/websocket/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/ganache-core/node_modules/websocket/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/whatwg-fetch": { + "version": "2.0.4", + "dev": true, + "license": "MIT" + }, + "node_modules/ganache-core/node_modules/wrappy": { + "version": "1.0.2", + "dev": true, + "license": "ISC" + }, + "node_modules/ganache-core/node_modules/ws": { + "version": "3.3.3", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "async-limiter": "~1.0.0", + "safe-buffer": "~5.1.0", + "ultron": "~1.1.0" + } + }, + "node_modules/ganache-core/node_modules/ws/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/ganache-core/node_modules/xhr": { + "version": "2.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "global": "~4.4.0", + "is-function": "^1.0.1", + "parse-headers": "^2.0.0", + "xtend": "^4.0.0" + } + }, + "node_modules/ganache-core/node_modules/xhr-request": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "buffer-to-arraybuffer": "^0.0.5", + "object-assign": "^4.1.1", + "query-string": "^5.0.1", + "simple-get": "^2.7.0", + "timed-out": "^4.0.1", + "url-set-query": "^1.0.0", + "xhr": "^2.0.4" + } + }, + "node_modules/ganache-core/node_modules/xhr-request-promise": { + "version": "0.1.3", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "xhr-request": "^1.1.0" + } + }, + "node_modules/ganache-core/node_modules/xhr2-cookies": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "cookiejar": "^2.1.1" + } + }, + "node_modules/ganache-core/node_modules/xtend": { + "version": "4.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/ganache-core/node_modules/yaeti": { + "version": "0.0.6", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.32" + } + }, + "node_modules/ganache-core/node_modules/yallist": { + "version": "3.1.1", + "dev": true, + "license": "ISC" + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "peer": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dependencies": { + "assert-plus": "^1.0.0" + } + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/global": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", + "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", + "dependencies": { + "min-document": "^2.19.0", + "process": "^0.11.10" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true + }, + "node_modules/growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true, + "engines": { + "node": ">=4.x" + } + }, + "node_modules/har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "engines": { + "node": ">=4" + } + }, + "node_modules/har-validator": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": 
"sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "deprecated": "this library is no longer supported", + "dependencies": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/hardhat": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/hardhat/-/hardhat-2.8.2.tgz", + "integrity": "sha512-cBUqzZGOi+lwKHArWl5Be7zeFIwlu1IUXOna6k5XhORZ8hAWDVbAJBVfxgmjkcX5GffIf0C5g841zRxo36sQ5g==", + "dev": true, + "dependencies": { + "@ethereumjs/block": "^3.6.0", + "@ethereumjs/blockchain": "^5.5.0", + "@ethereumjs/common": "^2.6.0", + "@ethereumjs/tx": "^3.4.0", + "@ethereumjs/vm": "^5.6.0", + "@ethersproject/abi": "^5.1.2", + "@sentry/node": "^5.18.1", + "@solidity-parser/parser": "^0.14.0", + "@types/bn.js": "^5.1.0", + "@types/lru-cache": "^5.1.0", + "abort-controller": "^3.0.0", + "adm-zip": "^0.4.16", + "ansi-escapes": "^4.3.0", + "chalk": "^2.4.2", + "chokidar": "^3.4.0", + "ci-info": "^2.0.0", + "debug": "^4.1.1", + "enquirer": "^2.3.0", + "env-paths": "^2.2.0", + "eth-sig-util": "^2.5.2", + "ethereum-cryptography": "^0.1.2", + "ethereumjs-abi": "^0.6.8", + "ethereumjs-util": "^7.1.3", + "find-up": "^2.1.0", + "fp-ts": "1.19.3", + "fs-extra": "^7.0.1", + "glob": "^7.1.3", + "https-proxy-agent": "^5.0.0", + "immutable": "^4.0.0-rc.12", + "io-ts": "1.10.4", + "lodash": "^4.17.11", + "merkle-patricia-tree": "^4.2.2", + "mnemonist": "^0.38.0", + "mocha": "^7.2.0", + "node-fetch": "^2.6.0", + "qs": "^6.7.0", + "raw-body": "^2.4.1", + "resolve": "1.17.0", + "semver": "^6.3.0", + "slash": "^3.0.0", + "solc": "0.7.3", + "source-map-support": "^0.5.13", + "stacktrace-parser": "^0.1.10", + "true-case-path": "^2.2.1", + "tsort": "0.0.1", + "uuid": "^8.3.2", + "ws": "^7.4.6" + }, + "bin": { + "hardhat": "internal/cli/cli.js" + }, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/hardhat/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "node_modules/hardhat/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/hardhat/node_modules/jsonfile": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/hardhat/node_modules/level-ws": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/level-ws/-/level-ws-2.0.0.tgz", + "integrity": "sha512-1iv7VXx0G9ec1isqQZ7y5LmoZo/ewAsyDHNA8EFDW5hqH2Kqovm33nSFkSdnLLAK+I5FlT+lo5Cw9itGe+CpQA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "readable-stream": "^3.1.0", + "xtend": "^4.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/hardhat/node_modules/merkle-patricia-tree": { + "version": "4.2.2", + "resolved": 
"https://registry.npmjs.org/merkle-patricia-tree/-/merkle-patricia-tree-4.2.2.tgz", + "integrity": "sha512-eqZYNTshcYx9aESkSPr71EqwsR/QmpnObDEV4iLxkt/x/IoLYZYjJvKY72voP/27Vy61iMOrfOG6jrn7ttXD+Q==", + "dev": true, + "dependencies": { + "@types/levelup": "^4.3.0", + "ethereumjs-util": "^7.1.2", + "level-mem": "^5.0.1", + "level-ws": "^2.0.0", + "readable-stream": "^3.6.0", + "rlp": "^2.2.4", + "semaphore-async-await": "^1.5.1" + } + }, + "node_modules/hardhat/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/hardhat/node_modules/resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "dependencies": { + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hardhat/node_modules/solc": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/solc/-/solc-0.7.3.tgz", + "integrity": "sha512-GAsWNAjGzIDg7VxzP6mPjdurby3IkGCjQcM8GFYZT6RyaoUZKmMU6Y7YwG+tFGhv7dwZ8rmR4iwFDrrD99JwqA==", + "dev": true, + "dependencies": { + "command-exists": "^1.2.8", + "commander": "3.0.2", + "follow-redirects": "^1.12.1", + "fs-extra": "^0.30.0", + "js-sha3": "0.8.0", + "memorystream": "^0.3.1", + "require-from-string": "^2.0.0", + "semver": "^5.5.0", + "tmp": "0.0.33" + }, + "bin": { + "solcjs": "solcjs" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/hardhat/node_modules/solc/node_modules/fs-extra": { + "version": "0.30.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz", + "integrity": "sha1-8jP/zAjU2n1DLapEl3aYnbHfk/A=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": "^2.1.0", + "klaw": "^1.0.0", + "path-is-absolute": "^1.0.0", + "rimraf": "^2.2.8" + } + }, + "node_modules/hardhat/node_modules/solc/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/hardhat/node_modules/ws": { + "version": "7.5.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.6.tgz", + "integrity": "sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA==", + "dev": true, + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-bigints": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz", + "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "engines": { + "node": ">=4" + } + }, + "node_modules/has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hash-base": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", + "dependencies": { + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/hash-base/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/hash-base/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/hash.js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "dependencies": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/hmac-drbg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=", + "dependencies": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + 
"node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/http-errors": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", + "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", + "dev": true, + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + }, + "engines": { + "node": ">=0.8", + "npm": ">=1.3.7" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/idna-uts46-hx": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/idna-uts46-hx/-/idna-uts46-hx-2.3.1.tgz", + "integrity": "sha512-PWoF9Keq6laYdIRwwCdhTPl60xRqAloYNMQLiyUnG42VjT53oW07BXIRM+NK7eQjzXjAk2gUvX9caRxlnF9TAA==", + "dev": true, + "dependencies": { + "punycode": "2.1.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/immediate": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.3.0.tgz", + "integrity": "sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q==" + }, + "node_modules/immutable": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-4.0.0.tgz", + "integrity": "sha512-zIE9hX70qew5qTUjSS7wi1iwj/l7+m54KWU247nhM3v806UdGj1yDndXj+IOYxxtW9zyLI+xqFNZjTuDaLUqFw==", + "dev": true + }, + "node_modules/import-fresh": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", + "integrity": "sha1-2BNVwVYS04bGH53dOSLUMEgipUY=", + "dev": true, + "dependencies": { + "caller-path": "^2.0.0", + "resolve-from": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "dependencies": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/inquirer/node_modules/ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/inquirer/node_modules/ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/inquirer/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/internal-slot": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", + "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.1.0", + "has": "^1.0.3", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/invert-kv": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", + "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/io-ts": { + "version": "1.10.4", + "resolved": "https://registry.npmjs.org/io-ts/-/io-ts-1.10.4.tgz", + "integrity": "sha512-b23PteSnYXSONJ6JQXRAlvJhuw8KOtkqa87W4wDtvMrud/DTJd5X+NpOOI+O/zZwVq6v0VLAaJ+1EDViKEuN9g==", 
+ "dev": true, + "dependencies": { + "fp-ts": "^1.0.0" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dev": true, + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-buffer": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", + "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "engines": { + "node": ">=4" + } + }, + "node_modules/is-callable": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", + "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "dev": true, + "dependencies": { + "ci-info": "^2.0.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/is-directory": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/is-directory/-/is-directory-0.3.1.tgz", + "integrity": "sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true, + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fn/-/is-fn-1.0.0.tgz", + "integrity": "sha1-lUPV3nvPWwiiLsiiC65uKG1RDYw=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/is-function": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.2.tgz", + "integrity": "sha512-lw7DUp0aWXYg+CBCN+JKkcE0Q2RayZnSvnZBlwgxHBQhqt5pZNVy4Ri7H9GmmXkdu7LUthszM+Tor1u/2iBcpQ==" + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-hex-prefixed": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-hex-prefixed/-/is-hex-prefixed-1.0.0.tgz", + "integrity": "sha1-fY035q135dEnFIkTxXPggtd39VQ=", + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz", + "integrity": "sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": 
"sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz", + "integrity": "sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + }, + "node_modules/is-url": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/is-url/-/is-url-1.2.4.tgz", + "integrity": "sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==", + "dev": true + }, + "node_modules/is-utf8": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", + "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", + "dev": true + }, + "node_modules/is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": 
"https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + }, + "node_modules/js-sha3": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", + "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "node_modules/js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "node_modules/json-rpc-engine": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/json-rpc-engine/-/json-rpc-engine-5.4.0.tgz", + "integrity": "sha512-rAffKbPoNDjuRnXkecTjnsE3xLLrb00rEkdgalINhaYVYIxDwWtvYBr9UFbhTvPB1B2qUOLoFd/cV6f4Q7mh7g==", + "dependencies": { + "eth-rpc-errors": "^3.0.0", + "safe-event-emitter": "^1.0.1" + } + }, + "node_modules/json-rpc-random-id": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-rpc-random-id/-/json-rpc-random-id-1.0.1.tgz", + "integrity": "sha1-uknZat7RRE27jaPSA3SKy7zeyMg=" + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "node_modules/json-stable-stringify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz", + "integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=", + "dependencies": { + "jsonify": "~0.0.0" + } + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + }, + "node_modules/json5": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", + "peer": true, + "dependencies": { + "minimist": "^1.2.5" + }, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsonify": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", + "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", + "engines": { + "node": "*" + } + }, + "node_modules/jsprim": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/keccak": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/keccak/-/keccak-3.0.2.tgz", + "integrity": "sha512-PyKKjkH53wDMLGrvmRGSNWgmSxZOUqbnXwKL9tmgbFYA1iAYqW21kfR7mZXV0MlESiefxQQE9X9fTa3X+2MPDQ==", + "hasInstallScript": true, + "dependencies": { + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/keccak/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/klaw": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", + "integrity": "sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.9" + } + }, + "node_modules/klaw-sync": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/klaw-sync/-/klaw-sync-6.0.0.tgz", + "integrity": "sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.11" + } + }, + "node_modules/lcid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", + "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "dev": true, + "dependencies": { + "invert-kv": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/level-codec": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-9.0.2.tgz", + "integrity": "sha512-UyIwNb1lJBChJnGfjmO0OR+ezh2iVu1Kas3nvBS/BzGnx79dv6g7unpKIDNPMhfdTEGoc7mC8uAu51XEtX+FHQ==", + "dev": true, + "dependencies": { + "buffer": "^5.6.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/level-concat-iterator": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", + "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/level-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", + "integrity": "sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", + "dev": true, + "dependencies": { + "errno": "~0.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/level-iterator-stream": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz", + "integrity": "sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q==", + "dev": true, + "dependencies": { + "inherits": "^2.0.4", + "readable-stream": "^3.4.0", + "xtend": "^4.0.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/level-iterator-stream/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/level-mem": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/level-mem/-/level-mem-5.0.1.tgz", + "integrity": "sha512-qd+qUJHXsGSFoHTziptAKXoLX87QjR7v2KMbqncDXPxQuCdsQlzmyX+gwrEHhlzn08vkf8TyipYyMmiC6Gobzg==", + "dev": true, + "dependencies": { + "level-packager": "^5.0.3", + "memdown": "^5.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/level-packager": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-5.1.1.tgz", + "integrity": "sha512-HMwMaQPlTC1IlcwT3+swhqf/NUO+ZhXVz6TY1zZIIZlIR0YSn8GtAAWmIvKjNY16ZkEg/JcpAuQskxsXqC0yOQ==", + "dev": true, + "dependencies": { + "encoding-down": "^6.3.0", + "levelup": "^4.3.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/level-supports": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", + "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", + "dev": true, + "dependencies": { + "xtend": "^4.0.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/level-ws": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/level-ws/-/level-ws-0.0.0.tgz", + "integrity": "sha1-Ny5RIXeSSgBCSwtDrvK7QkltIos=", + "dependencies": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + } + }, + "node_modules/level-ws/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "node_modules/level-ws/node_modules/object-keys": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz", + "integrity": "sha1-KKaq50KN0sOpLz2V8hM13SBOAzY=" + }, + "node_modules/level-ws/node_modules/readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "dependencies": { + "core-util-is": 
"~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/level-ws/node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "node_modules/level-ws/node_modules/xtend": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-2.1.2.tgz", + "integrity": "sha1-bv7MKk2tjmlixJAbM3znuoe10os=", + "dependencies": { + "object-keys": "~0.4.0" + }, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/levelup": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/levelup/-/levelup-4.4.0.tgz", + "integrity": "sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ==", + "dev": true, + "dependencies": { + "deferred-leveldown": "~5.3.0", + "level-errors": "~2.0.0", + "level-iterator-stream": "~4.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/load-json-file": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0", + "strip-bom": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/load-json-file/node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "dependencies": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash.assign": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz", + "integrity": "sha1-DZnzzNem0mHRm9rrkkUAXShYCOc=", + "dev": true + }, + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=" + }, + "node_modules/lodash.flatmap": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.flatmap/-/lodash.flatmap-4.5.0.tgz", + "integrity": "sha1-74y/QI9uSCaGYzRTBcaswLd4cC4=" + }, + "node_modules/log-symbols": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-3.0.0.tgz", + "integrity": "sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==", + "dev": true, + "dependencies": { + "chalk": "^2.4.2" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/lru_map": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/lru_map/-/lru_map-0.3.3.tgz", + "integrity": "sha1-tcg1G5Rky9dQM1p5ZQoOwOVhGN0=", + "dev": true + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/ltgt": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ltgt/-/ltgt-2.2.1.tgz", + "integrity": "sha1-81ypHEk/e3PaDgdJUwTxezH4fuU=" + }, + "node_modules/mcl-wasm": { + "version": "0.7.9", + "resolved": "https://registry.npmjs.org/mcl-wasm/-/mcl-wasm-0.7.9.tgz", + "integrity": "sha512-iJIUcQWA88IJB/5L15GnJVnSQJmf/YaxxV6zRavv83HILHaJQb6y0iFyDMdDO0gN8X37tdxmAOrH/P8B6RB8sQ==", + "dev": true, + "engines": { + "node": ">=8.9.0" + } + }, + "node_modules/md5.js": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/memdown": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/memdown/-/memdown-5.1.0.tgz", + "integrity": "sha512-B3J+UizMRAlEArDjWHTMmadet+UKwHd3UjMgGBkZcKAxAYVPS9o0Yeiha4qvz7iGiL2Sb3igUft6p7nbFWctpw==", + "dev": true, + "dependencies": { + "abstract-leveldown": "~6.2.1", + "functional-red-black-tree": "~1.0.1", + "immediate": "~3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/memdown/node_modules/abstract-leveldown": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", + "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", + "dev": true, + "dependencies": { + "buffer": "^5.5.0", + "immediate": "^3.2.3", + "level-concat-iterator": "~2.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/memdown/node_modules/immediate": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.2.3.tgz", + "integrity": "sha1-0UD6j2FGWb1lQSMwl92qwlzdmRw=", + "dev": true + }, + "node_modules/memdown/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/memorystream": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", + "integrity": "sha1-htcJCzDORV1j+64S3aUaR93K+bI=", + "dev": true, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/merkle-patricia-tree": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/merkle-patricia-tree/-/merkle-patricia-tree-2.3.2.tgz", + "integrity": 
"sha512-81PW5m8oz/pz3GvsAwbauj7Y00rqm81Tzad77tHBwU7pIAtN+TJnMSOJhxBKflSVYhptMMb9RskhqHqrSm1V+g==", + "dependencies": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + } + }, + "node_modules/merkle-patricia-tree/node_modules/abstract-leveldown": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-2.6.3.tgz", + "integrity": "sha512-2++wDf/DYqkPR3o5tbfdhF96EfMApo1GpPfzOsR/ZYXdkSmELlvOOEAl9iKkRsktMPHdGjO4rtkBpf2I7TiTeA==", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/merkle-patricia-tree/node_modules/async": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=" + }, + "node_modules/merkle-patricia-tree/node_modules/deferred-leveldown": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-1.2.2.tgz", + "integrity": "sha512-uukrWD2bguRtXilKt6cAWKyoXrTSMo5m7crUdLfWQmu8kIm88w3QZoUL+6nhpfKVmhHANER6Re3sKoNoZ3IKMA==", + "dependencies": { + "abstract-leveldown": "~2.6.0" + } + }, + "node_modules/merkle-patricia-tree/node_modules/ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dependencies": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/merkle-patricia-tree/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "node_modules/merkle-patricia-tree/node_modules/level-codec": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-7.0.1.tgz", + "integrity": "sha512-Ua/R9B9r3RasXdRmOtd+t9TCOEIIlts+TN/7XTT2unhDaL6sJn83S3rUyljbr6lVtw49N3/yA0HHjpV6Kzb2aQ==" + }, + "node_modules/merkle-patricia-tree/node_modules/level-errors": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-1.0.5.tgz", + "integrity": "sha512-/cLUpQduF6bNrWuAC4pwtUKA5t669pCsCi2XbmojG2tFeOr9j6ShtdDCtFFQO1DRt+EVZhx9gPzP9G2bUaG4ig==", + "dependencies": { + "errno": "~0.1.1" + } + }, + "node_modules/merkle-patricia-tree/node_modules/level-iterator-stream": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-1.3.1.tgz", + "integrity": "sha1-5Dt4sagUPm+pek9IXrjqUwNS8u0=", + "dependencies": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + } + }, + "node_modules/merkle-patricia-tree/node_modules/level-iterator-stream/node_modules/readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/merkle-patricia-tree/node_modules/levelup": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/levelup/-/levelup-1.3.9.tgz", + "integrity": 
"sha512-VVGHfKIlmw8w1XqpGOAGwq6sZm2WwWLmlDcULkKWQXEA5EopA8OBNJ2Ck2v6bdk8HeEZSbCSEgzXadyQFm76sQ==", + "dependencies": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "node_modules/merkle-patricia-tree/node_modules/memdown": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/memdown/-/memdown-1.4.1.tgz", + "integrity": "sha1-tOThkhdGZP+65BNhqlAPMRnv4hU=", + "dependencies": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + } + }, + "node_modules/merkle-patricia-tree/node_modules/memdown/node_modules/abstract-leveldown": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-2.7.2.tgz", + "integrity": "sha512-+OVvxH2rHVEhWLdbudP6p0+dNMXu8JA1CbhP19T8paTYAcX7oJ4OVjT+ZUVpv7mITxXHqDMej+GdqXBmXkw09w==", + "dependencies": { + "xtend": "~4.0.0" + } + }, + "node_modules/merkle-patricia-tree/node_modules/semver": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.4.1.tgz", + "integrity": "sha512-WfG/X9+oATh81XtllIo/I8gOiY9EXRdv1cQdyykeXK17YcUW3EXUAi2To4pcH6nZtJPr7ZOpM5OMyWJZm+8Rsg==", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/merkle-patricia-tree/node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "node_modules/micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "dependencies": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/miller-rabin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", + "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", + "dev": true, + "dependencies": { + "bn.js": "^4.0.0", + "brorand": "^1.0.1" + }, + "bin": { + "miller-rabin": "bin/miller-rabin" + } + }, + "node_modules/mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "dependencies": { + "mime-db": "1.51.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/min-document": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", + "integrity": "sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=", + "dependencies": { + "dom-walk": "^0.1.0" 
+ } + }, + "node_modules/minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" + }, + "node_modules/minimalistic-crypto-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=" + }, + "node_modules/minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + }, + "node_modules/mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "dependencies": { + "minimist": "^1.2.5" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mnemonist": { + "version": "0.38.5", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.5.tgz", + "integrity": "sha512-bZTFT5rrPKtPJxj8KSV0WkPyNxl72vQepqqVUAW2ARUpUSF2qXMB6jZj7hW5/k7C1rtpzqbD/IIbJwLXUjCHeg==", + "dev": true, + "dependencies": { + "obliterator": "^2.0.0" + } + }, + "node_modules/mocha": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-7.2.0.tgz", + "integrity": "sha512-O9CIypScywTVpNaRrCAgoUnJgozpIofjKUYmJhiCIJMiuYnLI6otcb1/kpW9/n/tJODHGZ7i8aLQoDVsMtOKQQ==", + "dev": true, + "dependencies": { + "ansi-colors": "3.2.3", + "browser-stdout": "1.3.1", + "chokidar": "3.3.0", + "debug": "3.2.6", + "diff": "3.5.0", + "escape-string-regexp": "1.0.5", + "find-up": "3.0.0", + "glob": "7.1.3", + "growl": "1.10.5", + "he": "1.2.0", + "js-yaml": "3.13.1", + "log-symbols": "3.0.0", + "minimatch": "3.0.4", + "mkdirp": "0.5.5", + "ms": "2.1.1", + "node-environment-flags": "1.0.6", + "object.assign": "4.1.0", + "strip-json-comments": "2.0.1", + "supports-color": "6.0.0", + "which": "1.3.1", + "wide-align": "1.1.3", + "yargs": "13.3.2", + "yargs-parser": "13.1.2", + "yargs-unparser": "1.6.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/mocha/node_modules/ansi-colors": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.3.tgz", + "integrity": "sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/mocha/node_modules/chokidar": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.3.0.tgz", + "integrity": "sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + 
"is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.2.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.1.1" + } + }, + "node_modules/mocha/node_modules/debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. (https://github.com/visionmedia/debug/issues/797)", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/mocha/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/mocha/node_modules/fsevents": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", + "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", + "deprecated": "\"Please update to latest v2.3 or v2.2\"", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/mocha/node_modules/glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mocha/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/mocha/node_modules/ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", + "dev": true + }, + "node_modules/mocha/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mocha/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/mocha/node_modules/p-try": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/mocha/node_modules/readdirp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.2.0.tgz", + "integrity": "sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==", + "dev": true, + "dependencies": { + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mocha/node_modules/supports-color": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.0.0.tgz", + "integrity": "sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, + "node_modules/nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, + "node_modules/node-addon-api": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-2.0.2.tgz", + "integrity": "sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA==" + }, + "node_modules/node-environment-flags": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.6.tgz", + "integrity": "sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==", + "dev": true, + "dependencies": { + "object.getownpropertydescriptors": "^2.0.3", + "semver": "^5.7.0" + } + }, + "node_modules/node-environment-flags/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": 
"sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" + } + }, + "node_modules/node-releases": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz", + "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==" + }, + "node_modules/nofilter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/nofilter/-/nofilter-1.0.4.tgz", + "integrity": "sha512-N8lidFp+fCz+TD51+haYdbDGrcBWwuHX40F5+z0qkUjMJ5Tp+rdSuAkMJ9N9eoolDlEVTf6u5icM+cNKkKW2mA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/normalize-package-data/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/number-to-bn": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/number-to-bn/-/number-to-bn-1.7.0.tgz", + "integrity": "sha1-uzYjWS9+X54AMLGXe9QaDFP+HqA=", + "dev": true, + "dependencies": { + "bn.js": "4.11.6", + "strip-hex-prefix": "1.0.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/number-to-bn/node_modules/bn.js": { + "version": "4.11.6", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.6.tgz", + "integrity": "sha1-UzRK2xRhehP26N0s4okF0cC6MhU=", + "dev": true + }, + "node_modules/oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "engines": { + "node": "*" + } + }, + "node_modules/object-inspect": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": 
"sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.getownpropertydescriptors": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.3.tgz", + "integrity": "sha512-VdDoCwvJI4QdC6ndjpqFmoL3/+HxffFBbcJzKi5hwLLqqx3mdbedRpfZDdK0SrOSauj8X4GzBvnDZl4vTN7dOw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/obliterator": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-2.0.1.tgz", + "integrity": "sha512-XnkiCrrBcIZQitJPAI36mrrpEUvatbte8hLcTcQwKA1v9NkCKasSi+UAguLsLDs/out7MoRzAlmz7VXvY6ph6w==", + "dev": true + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "dependencies": { + "mimic-fn": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/open": { + "version": "7.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-7.4.2.tgz", + "integrity": "sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==", + "dev": true, + "dependencies": { + "is-docker": "^2.0.0", + "is-wsl": "^2.1.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/os-locale": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz", + "integrity": "sha1-IPnxeuKe00XoveWDsT0gCYA8FNk=", + "dev": true, + "dependencies": { + "lcid": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": 
"sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "dependencies": { + "p-try": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "dependencies": { + "p-limit": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parent-module/node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-headers": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/parse-headers/-/parse-headers-2.0.4.tgz", + "integrity": "sha512-psZ9iZoCNFLrgRjZ1d8mn0h9WRqJwFxM9q3x7iUjN/YT2OksthDJ5TiPCu2F38kS4zutqfW+YdVVkBZZx3/1aw==" + }, + "node_modules/parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "dependencies": { + "error-ex": "^1.2.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/patch-package": { + "version": "6.4.7", + "resolved": "https://registry.npmjs.org/patch-package/-/patch-package-6.4.7.tgz", + "integrity": "sha512-S0vh/ZEafZ17hbhgqdnpunKDfzHQibQizx9g8yEf5dcVk3KOflOfdufRXQX8CSEkyOQwuM/bNz1GwKvFj54kaQ==", + "dev": true, + "dependencies": { + "@yarnpkg/lockfile": "^1.1.0", + "chalk": "^2.4.2", + "cross-spawn": "^6.0.5", + "find-yarn-workspace-root": "^2.0.0", + "fs-extra": "^7.0.1", + "is-ci": "^2.0.0", + "klaw-sync": "^6.0.0", + "minimist": "^1.2.0", + "open": "^7.4.2", + "rimraf": "^2.6.3", + "semver": "^5.6.0", + "slash": "^2.0.0", + "tmp": "^0.0.33" + }, + "bin": { + "patch-package": "index.js" + }, + "engines": { + "npm": ">5" + } + }, + "node_modules/patch-package/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/patch-package/node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", + "dev": true + }, + 
"node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-is-inside": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", + "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", + "dev": true + }, + "node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "node_modules/path-type": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", + "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-type/node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/pbkdf2": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", + "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "dependencies": { + "create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "engines": { + "node": ">=4" + } + }, + "node_modules/pinkie": 
{ + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "dev": true, + "dependencies": { + "pinkie": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postinstall-postinstall": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/postinstall-postinstall/-/postinstall-postinstall-2.1.0.tgz", + "integrity": "sha512-7hQX6ZlZXIoRiWNrbMQaLzUUfH+sSx39u8EJ9HYuDc1kLo9IXKWjM5RSquZN1ad5GnH8CGFM78fsAAQi3OKEEQ==", + "dev": true, + "hasInstallScript": true + }, + "node_modules/precond": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/precond/-/precond-0.2.3.tgz", + "integrity": "sha1-qpWRvKokkj8eD0hJ0kD0fvwQdaw=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.1.tgz", + "integrity": "sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/printj": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/printj/-/printj-1.1.2.tgz", + "integrity": "sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==", + "dev": true, + "bin": { + "printj": "bin/printj.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/promise-to-callback": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/promise-to-callback/-/promise-to-callback-1.0.0.tgz", + "integrity": "sha1-XSp0kBC/tn2WNZj805YHRqaP7vc=", + "dependencies": { + "is-fn": "^1.0.0", + "set-immediate-shim": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/prr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", + "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=" + }, + "node_modules/psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + }, + "node_modules/punycode": { + "version": 
"2.1.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.0.tgz", + "integrity": "sha1-X4Y+3Im5bbCQdLrXlHvwkFbKTn0=", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "dev": true, + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/querystring": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", + "integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==", + "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", + "dev": true, + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/raw-body": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.2.tgz", + "integrity": "sha512-RPMAFUJP19WIet/99ngh6Iv8fzAbqum4Li7AD6DtGaW2RpMB/11xDoalPiJMTbu6I3hkbMVkATvZrqb9EEqeeQ==", + "dev": true, + "dependencies": { + "bytes": "3.1.1", + "http-errors": "1.8.1", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/read-pkg": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", + "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", + "dev": true, + "dependencies": { + "load-json-file": "^1.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-pkg-up": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", + "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", + "dev": true, + "dependencies": { + "find-up": "^1.0.0", + "read-pkg": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-pkg-up/node_modules/find-up": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "dev": true, + "dependencies": { + "path-exists": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-pkg-up/node_modules/path-exists": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "dev": true, + "dependencies": { + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + 
"node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" + }, + "node_modules/regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true, + "engines": { + "node": ">=6.5.0" + } + }, + "node_modules/request": { + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142", + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/request/node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/request/node_modules/qs": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/request/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "node_modules/resolve": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.21.0.tgz", + "integrity": "sha512-3wCbTpk5WJlyE4mSOtDLhqQmGFi0/TD9VPwmiolnk8U0wRgMEktqCXd3vy5buTO3tljvalNvKrjHEfrd2WpEKA==", + "dependencies": { + "is-core-module": "^2.8.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "dependencies": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/ripemd160": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "node_modules/rlp": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/rlp/-/rlp-2.2.7.tgz", + "integrity": "sha512-d5gdPmgQ0Z+AklL2NVXr/IoSjNZFfTVvQWzL/AM2AOcSzYP2xjlb0AC8YyCLc41MSNf6P6QVtjgPdmVtzb+4lQ==", + "dependencies": { + "bn.js": "^5.2.0" + }, + "bin": { + "rlp": "bin/rlp" + } + }, + "node_modules/rlp/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==" + }, + "node_modules/run-async": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + 
"node_modules/rustbn.js": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/rustbn.js/-/rustbn.js-0.2.0.tgz", + "integrity": "sha512-4VlvkRUuCJvr2J6Y0ImW7NvTCriMi7ErOAqWk1y69vAdoNIzCF3yPmgeNzx+RQTLEDFq5sHfscn1MwHxP9hNfA==" + }, + "node_modules/rxjs": { + "version": "6.6.7", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", + "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", + "dev": true, + "dependencies": { + "tslib": "^1.9.0" + }, + "engines": { + "npm": ">=2.0.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/safe-event-emitter": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/safe-event-emitter/-/safe-event-emitter-1.0.1.tgz", + "integrity": "sha512-e1wFe99A91XYYxoQbcq2ZJUWurxEyP8vfz7A7vuUe1s95q8r5ebraVaA1BukYJcpM6V16ugWoD9vngi8Ccu5fg==", + "deprecated": "Renamed to @metamask/safe-event-emitter", + "dependencies": { + "events": "^3.0.0" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/scrypt-js": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/scrypt-js/-/scrypt-js-3.0.1.tgz", + "integrity": "sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA==" + }, + "node_modules/secp256k1": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/secp256k1/-/secp256k1-4.0.3.tgz", + "integrity": "sha512-NLZVf+ROMxwtEj3Xa562qgv2BK5e2WNmXPiOdVIPLgs6lyTzMvBq0aWTYMI5XCP9jZMVKOcqZLw/Wc4vDkuxhA==", + "hasInstallScript": true, + "dependencies": { + "elliptic": "^6.5.4", + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/semaphore": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/semaphore/-/semaphore-1.1.0.tgz", + "integrity": "sha512-O4OZEaNtkMd/K0i6js9SL+gqy0ZCBMgUvlSqHKi4IBdjhe7wB8pwztUk1BbZ1fmrvpwFrPbHzqd2w5pTcJH6LA==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/semaphore-async-await": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/semaphore-async-await/-/semaphore-async-await-1.5.1.tgz", + "integrity": "sha1-hXvvXjZEYBykuVcLh+nfXKEpdPo=", + "dev": true, + "engines": { + "node": ">=4.1" + } + }, + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "node_modules/set-immediate-shim": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz", + "integrity": "sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=" + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "dev": true + }, + "node_modules/sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, + "node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz", + "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/solc": { + "version": "0.6.12", + "resolved": "https://registry.npmjs.org/solc/-/solc-0.6.12.tgz", + "integrity": "sha512-Lm0Ql2G9Qc7yPP2Ba+WNmzw2jwsrd3u4PobHYlSOxaut3TtUbj9+5ZrT6f4DUpNPEoBaFUOEg9Op9C0mk7ge9g==", + "dev": true, + "dependencies": { + "command-exists": "^1.2.8", + "commander": "3.0.2", + "fs-extra": "^0.30.0", + "js-sha3": "0.8.0", + "memorystream": "^0.3.1", + "require-from-string": "^2.0.0", + "semver": "^5.5.0", + "tmp": "0.0.33" + }, + "bin": { + "solcjs": "solcjs" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/solc/node_modules/fs-extra": { + "version": "0.30.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz", + "integrity": "sha1-8jP/zAjU2n1DLapEl3aYnbHfk/A=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": 
"^2.1.0", + "klaw": "^1.0.0", + "path-is-absolute": "^1.0.0", + "rimraf": "^2.2.8" + } + }, + "node_modules/solc/node_modules/jsonfile": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/solc/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/solhint": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/solhint/-/solhint-3.3.6.tgz", + "integrity": "sha512-HWUxTAv2h7hx3s3hAab3ifnlwb02ZWhwFU/wSudUHqteMS3ll9c+m1FlGn9V8ztE2rf3Z82fQZA005Wv7KpcFA==", + "dev": true, + "dependencies": { + "@solidity-parser/parser": "^0.13.2", + "ajv": "^6.6.1", + "antlr4": "4.7.1", + "ast-parents": "0.0.1", + "chalk": "^2.4.2", + "commander": "2.18.0", + "cosmiconfig": "^5.0.7", + "eslint": "^5.6.0", + "fast-diff": "^1.1.2", + "glob": "^7.1.3", + "ignore": "^4.0.6", + "js-yaml": "^3.12.0", + "lodash": "^4.17.11", + "semver": "^6.3.0" + }, + "bin": { + "solhint": "solhint.js" + }, + "optionalDependencies": { + "prettier": "^1.14.3" + } + }, + "node_modules/solhint/node_modules/@solidity-parser/parser": { + "version": "0.13.2", + "resolved": "https://registry.npmjs.org/@solidity-parser/parser/-/parser-0.13.2.tgz", + "integrity": "sha512-RwHnpRnfrnD2MSPveYoPh8nhofEvX7fgjHk1Oq+NNvCcLx4r1js91CO9o+F/F3fBzOCyvm8kKRTriFICX/odWw==", + "dev": true, + "dependencies": { + "antlr4ts": "^0.5.0-alpha.4" + } + }, + "node_modules/solhint/node_modules/commander": { + "version": "2.18.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.18.0.tgz", + "integrity": "sha512-6CYPa+JP2ftfRU2qkDK+UTVeQYosOg/2GbcjIcKPHfinyOLPVGXu/ovN86RP49Re5ndJK1N0kuiidFFuepc4ZQ==", + "dev": true + }, + "node_modules/solhint/node_modules/prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true, + "optional": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": 
"sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.11.tgz", + "integrity": "sha512-Ctl2BrFiM0X3MANYgj3CkygxhRmr9mi6xhejbdO960nF6EDJApTYpn0BQnDKlnNBULKiCN1n3w9EBkHK8ZWg+g==", + "dev": true + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "node_modules/sshpk": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sshpk/node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + }, + "node_modules/stacktrace-parser": { + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/stacktrace-parser/-/stacktrace-parser-0.1.10.tgz", + "integrity": "sha512-KJP1OCML99+8fhOHxwwzyWrlUuVX5GQ0ZpJTd1DFXhdkrvg1szxfHhawXUZ3g9TkXORQd4/WG68jMlQZ2p8wlg==", + "dev": true, + "dependencies": { + "type-fest": "^0.7.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/stacktrace-parser/node_modules/type-fest": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.7.1.tgz", + "integrity": "sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string-width": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "dependencies": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz", + "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz", + "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "dependencies": { + "ansi-regex": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-bom": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", + "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "dev": true, + "dependencies": { + "is-utf8": "^0.2.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-hex-prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-hex-prefix/-/strip-hex-prefix-1.0.0.tgz", + "integrity": "sha1-DF8VX+8RUTczd96du1iNoFUA428=", + "dependencies": { + "is-hex-prefixed": "1.0.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/table": { + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "dev": true, + "dependencies": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + }, + "engines": { + "node": 
">=6.0.0" + } + }, + "node_modules/table/node_modules/ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/table/node_modules/string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "dependencies": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/table/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/test-value": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/test-value/-/test-value-2.1.0.tgz", + "integrity": "sha1-Edpv9nDzRxpztiXKTz/c97t0gpE=", + "dev": true, + "dependencies": { + "array-back": "^1.0.3", + "typical": "^2.6.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/test-value/node_modules/array-back": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", + "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", + "dev": true, + "dependencies": { + "typical": "^2.6.0" + }, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/testrpc": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/testrpc/-/testrpc-0.0.1.tgz", + "integrity": "sha512-afH1hO+SQ/VPlmaLUFj2636QMeDvPCeQMc/9RBMW0IfjNe9gFD9Ra3ShqYkB7py0do1ZcCna/9acHyzTJ+GcNA==", + "deprecated": "testrpc has been renamed to ganache-cli, please use this package from now on.", + "dev": true + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "dev": true + }, + "node_modules/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "dev": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "dependencies": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tough-cookie/node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true + }, + "node_modules/true-case-path": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-2.2.1.tgz", + "integrity": "sha512-0z3j8R7MCjy10kc/g+qg7Ln3alJTodw9aDuVWZa3uiWqfuBMKeAeP2ocWcxoyM3D73yz3Jt/Pu4qPr4wHSdB/Q==", + "dev": true + }, + "node_modules/truffle-plugin-verify": { + "version": "0.5.20", + "resolved": "https://registry.npmjs.org/truffle-plugin-verify/-/truffle-plugin-verify-0.5.20.tgz", + "integrity": "sha512-s6zG7QbVK5tWPAhRz1oKi/M8SXdRgcWR4PRuHM/BB0qZBcE/82WmnqyC2D/qfqEY+BCgUUWXfc/hyzsgH4dyNw==", + "dev": true, + "dependencies": { + "axios": "^0.21.1", + "cli-logger": "^0.5.40", + "delay": "^5.0.0", + "querystring": "^0.2.1" + } + }, + "node_modules/ts-essentials": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-1.0.4.tgz", + "integrity": "sha512-q3N1xS4vZpRouhYHDPwO0bDW3EZ6SK9CrrDHxi/D6BPReSjpVgWIOpLS2o0gSBZm+7q/wyKp6RVM1AeeW7uyfQ==", + "dev": true + }, + "node_modules/ts-generator": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/ts-generator/-/ts-generator-0.1.1.tgz", + "integrity": "sha512-N+ahhZxTLYu1HNTQetwWcx3so8hcYbkKBHTr4b4/YgObFTIKkOSSsaa+nal12w8mfrJAyzJfETXawbNjSfP2gQ==", + "dev": true, + "dependencies": { + "@types/mkdirp": "^0.5.2", + "@types/prettier": "^2.1.1", + "@types/resolve": "^0.0.8", + "chalk": "^2.4.1", + "glob": "^7.1.2", + "mkdirp": "^0.5.1", + "prettier": "^2.1.2", + "resolve": "^1.8.1", + "ts-essentials": "^1.0.0" + }, + "bin": { + "ts-generator": "dist/cli/run.js" + } + }, + "node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/tsort": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/tsort/-/tsort-0.0.1.tgz", + "integrity": "sha1-4igPXoF/i/QnVlf9D5rr1E9aJ4Y=", + "dev": true + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/tweetnacl": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz", + "integrity": 
"sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==", + "dev": true + }, + "node_modules/tweetnacl-util": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/tweetnacl-util/-/tweetnacl-util-0.15.1.tgz", + "integrity": "sha512-RKJBIj8lySrShN4w6i/BonWp2Z/uxwC3h4y7xsRrpP59ZboCd0GpEVsOnMDYLMmKBpYhb5TgHzZXy7wTfYFBRw==", + "dev": true + }, + "node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typechain": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/typechain/-/typechain-3.0.0.tgz", + "integrity": "sha512-ft4KVmiN3zH4JUFu2WJBrwfHeDf772Tt2d8bssDTo/YcckKW2D+OwFrHXRC6hJvO3mHjFQTihoMV6fJOi0Hngg==", + "dev": true, + "dependencies": { + "command-line-args": "^4.0.7", + "debug": "^4.1.1", + "fs-extra": "^7.0.0", + "js-sha3": "^0.8.0", + "lodash": "^4.17.15", + "ts-essentials": "^6.0.3", + "ts-generator": "^0.1.1" + }, + "bin": { + "typechain": "dist/cli/cli.js" + } + }, + "node_modules/typechain/node_modules/ts-essentials": { + "version": "6.0.7", + "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-6.0.7.tgz", + "integrity": "sha512-2E4HIIj4tQJlIHuATRHayv0EfMGK3ris/GRk1E3CFnsZzeNV+hUmelbaTZHLtXaZppM5oLhHRtO04gINC4Jusw==", + "dev": true, + "peerDependencies": { + "typescript": ">=3.7.0" + } + }, + "node_modules/typescript": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.4.tgz", + "integrity": "sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==", + "dev": true, + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/typical": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/typical/-/typical-2.6.1.tgz", + "integrity": "sha1-XAgOXWYcu+OCWdLnCjxyU+hziB0=", + "dev": true + }, + "node_modules/unbox-primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", + "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "has-bigints": "^1.0.1", + "has-symbols": "^1.0.2", + "which-boxed-primitive": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + 
"dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/url": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", + "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", + "dev": true, + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "node_modules/url/node_modules/punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", + "dev": true + }, + "node_modules/url/node_modules/querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", + "dev": true, + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/utf8/-/utf8-3.0.0.tgz", + "integrity": "sha512-E8VjFIQ/TyQgp+TZfS6l8yp/xWppSAHzidGiRrqe4bK4XP9pTRyKFgGJpO3SN7zdX4DeomTrwaseCHovfpFcqQ==" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "node_modules/verror/node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "node_modules/web3-utils": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/web3-utils/-/web3-utils-1.7.0.tgz", + "integrity": "sha512-O8Tl4Ky40Sp6pe89Olk2FsaUkgHyb5QAXuaKo38ms3CxZZ4d3rPGfjP9DNKGm5+IUgAZBNpF1VmlSmNCqfDI1w==", + "dev": true, + "dependencies": { + "bn.js": "^4.11.9", + "ethereum-bloom-filters": "^1.0.6", + "ethereumjs-util": "^7.1.0", + "ethjs-unit": "0.1.6", + "number-to-bn": "1.7.0", + "randombytes": "^2.1.0", + "utf8": "3.0.0" + }, + "engines": { + 
"node": ">=8.0.0" + } + }, + "node_modules/web3-utils/node_modules/ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "dependencies": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/web3-utils/node_modules/ethereumjs-util/node_modules/bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true + }, + "node_modules/whatwg-fetch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-2.0.4.tgz", + "integrity": "sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "node_modules/wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dev": true, + "dependencies": { + "string-width": "^1.0.2 || 2" + } + }, + "node_modules/window-size": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.2.0.tgz", + "integrity": "sha1-tDFbtCFKPXBY6+7okuE/ok2YsHU=", + "dev": true, + "bin": { + "window-size": "cli.js" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true, + "engines": { + "node": 
">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/wrap-ansi/node_modules/string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "dependencies": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "node_modules/write": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "dev": true, + "dependencies": { + "mkdirp": "^0.5.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ws": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-5.2.3.tgz", + "integrity": "sha512-jZArVERrMsKUatIdnLzqvcfydI85dvd/Fp1u/VOpfdDWQ4c9qWXe+VIeAbQ5FrDwciAkr+lzofXLz3Kuf26AOA==", + "dependencies": { + "async-limiter": "~1.0.0" + } + }, + "node_modules/xhr": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/xhr/-/xhr-2.6.0.tgz", + "integrity": "sha512-/eCGLb5rxjx5e3mF1A7s+pLlR6CGyqWN91fv1JgER5mVWg1MZmlhBvy9kjcsOdRk8RrIujotWyJamfyrp+WIcA==", + "dependencies": { + "global": "~4.4.0", + "is-function": "^1.0.1", + "parse-headers": "^2.0.0", + "xtend": "^4.0.0" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "node_modules/yargs": { + "version": "13.3.2", + "resolved": 
"https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, + "dependencies": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + } + }, + "node_modules/yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, + "node_modules/yargs-unparser": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.6.0.tgz", + "integrity": "sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==", + "dev": true, + "dependencies": { + "flat": "^4.1.0", + "lodash": "^4.17.15", + "yargs": "^13.3.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/string-width": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "dependencies": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + } + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "requires": { + "@babel/highlight": "^7.16.7" + } + }, + "@babel/compat-data": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.16.8.tgz", + "integrity": "sha512-m7OkX0IdKLKPpBlJtF561YJal5y/jyI5fNfWbPxh2D/nbzzGI4qRyrD8xO2jB24u7l+5I2a43scCG2IrfjC50Q==" + }, + "@babel/core": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.16.7.tgz", + "integrity": "sha512-aeLaqcqThRNZYmbMqtulsetOQZ/5gbR/dWruUCJcpas4Qoyy+QeagfDsPdMrqwsPRDNxJvBlRiZxxX7THO7qtA==", + "peer": true, + "requires": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.16.7", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helpers": "^7.16.7", + "@babel/parser": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.1.2", + "semver": "^6.3.0", + "source-map": "^0.5.0" + } + }, + "@babel/generator": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.16.8.tgz", + "integrity": "sha512-1ojZwE9+lOXzcWdWmO6TbUzDfqLD39CmEhN8+2cX9XkDo5yW1OpgfejfliysR2AWLpMamTiOiAp/mtroaymhpw==", + "requires": { + "@babel/types": "^7.16.8", + "jsesc": "^2.5.1", + "source-map": "^0.5.0" + } + }, + "@babel/helper-compilation-targets": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.16.7.tgz", + "integrity": "sha512-mGojBwIWcwGD6rfqgRXVlVYmPAv7eOpIemUG3dGnDdCY4Pae70ROij3XmfrH6Fa1h1aiDylpglbZyktfzyo/hA==", + "requires": { + "@babel/compat-data": "^7.16.4", + "@babel/helper-validator-option": "^7.16.7", + "browserslist": "^4.17.5", + "semver": "^6.3.0" + } + }, + "@babel/helper-define-polyfill-provider": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz", + "integrity": "sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==", + "requires": { + "@babel/helper-compilation-targets": "^7.13.0", + "@babel/helper-module-imports": "^7.12.13", + "@babel/helper-plugin-utils": "^7.13.0", + "@babel/traverse": "^7.13.0", + "debug": "^4.1.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.14.2", + "semver": "^6.1.2" + } + }, + "@babel/helper-environment-visitor": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz", + "integrity": "sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==", + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-function-name": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.16.7.tgz", + "integrity": "sha512-QfDfEnIUyyBSR3HtrtGECuZ6DAyCkYFp7GHl75vFtTnn6pjKeK0T1DB5lLkFvBea8MdaiUABx3osbgLyInoejA==", + "requires": { + "@babel/helper-get-function-arity": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.7.tgz", + "integrity": "sha512-flc+RLSOBXzNzVhcLu6ujeHUrD6tANAOU5ojrRx/as+tbzf8+stUCj7+IfRRoAbEZqj/ahXEMsjhOhgeZsrnTw==", + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-hoist-variables": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz", + "integrity": "sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==", + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-module-imports": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz", + "integrity": "sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==", + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-module-transforms": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.16.7.tgz", + "integrity": "sha512-gaqtLDxJEFCeQbYp9aLAefjhkKdjKcdh6DB7jniIGU3Pz52WAmP268zK0VgPz9hUNkMSYeH976K2/Y6yPadpng==", + "peer": true, + "requires": { + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-simple-access": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/helper-validator-identifier": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-plugin-utils": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz", + "integrity": "sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA==" + }, + "@babel/helper-simple-access": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.16.7.tgz", + "integrity": "sha512-ZIzHVyoeLMvXMN/vok/a4LWRy8G2v205mNP0XOuf9XRLyX5/u9CnVulUtDgUTama3lT+bf/UqucuZjqiGuTS1g==", + "peer": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz", + "integrity": "sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==", + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + 
"integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==" + }, + "@babel/helper-validator-option": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz", + "integrity": "sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==" + }, + "@babel/helpers": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.16.7.tgz", + "integrity": "sha512-9ZDoqtfY7AuEOt3cxchfii6C7GDyyMBffktR5B2jvWv8u2+efwvpnVKXMWzNehqy68tKgAfSwfdw/lWpthS2bw==", + "peer": true, + "requires": { + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/highlight": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.7.tgz", + "integrity": "sha512-aKpPMfLvGO3Q97V0qhw/V2SWNWlwfJknuwAunU7wZLSfrM4xTBvg7E5opUVi1kJTBKihE38CPg4nBiqX83PWYw==", + "requires": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.16.8.tgz", + "integrity": "sha512-i7jDUfrVBWc+7OKcBzEe5n7fbv3i2fWtxKzzCvOjnzSxMfWMigAhtfJ7qzZNGFNMsCCd67+uz553dYKWXPvCKw==" + }, + "@babel/plugin-transform-runtime": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.16.8.tgz", + "integrity": "sha512-6Kg2XHPFnIarNweZxmzbgYnnWsXxkx9WQUVk2sksBRL80lBC1RAQV3wQagWxdCHiYHqPN+oenwNIuttlYgIbQQ==", + "requires": { + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "babel-plugin-polyfill-corejs2": "^0.3.0", + "babel-plugin-polyfill-corejs3": "^0.5.0", + "babel-plugin-polyfill-regenerator": "^0.3.0", + "semver": "^6.3.0" + } + }, + "@babel/runtime": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz", + "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==", + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "@babel/template": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.16.7.tgz", + "integrity": "sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==", + "requires": { + "@babel/code-frame": "^7.16.7", + "@babel/parser": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/traverse": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.16.8.tgz", + "integrity": "sha512-xe+H7JlvKsDQwXRsBhSnq1/+9c+LlQcCK3Tn/l5sbx02HYns/cn7ibp9+RV1sIUqu7hKg91NWsgHurO9dowITQ==", + "requires": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.16.8", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-hoist-variables": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/parser": "^7.16.8", + "@babel/types": "^7.16.8", + "debug": "^4.1.0", + "globals": "^11.1.0" + } + }, + "@babel/types": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.16.8.tgz", + "integrity": "sha512-smN2DQc5s4M7fntyjGtyIPbRJv6wW4rU/94fmYJ7PKQuZkC0qGMHXJbg6sNGt12JmVr4k5YaptI/XtiLJBnmIg==", + "requires": { + "@babel/helper-validator-identifier": "^7.16.7", + 
"to-fast-properties": "^2.0.0" + } + }, + "@ensdomains/ens": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/@ensdomains/ens/-/ens-0.4.5.tgz", + "integrity": "sha512-JSvpj1iNMFjK6K+uVl4unqMoa9rf5jopb8cya5UGBWz23Nw8hSNT7efgUx4BTlAPAgpNlEioUfeTyQ6J9ZvTVw==", + "dev": true, + "requires": { + "bluebird": "^3.5.2", + "eth-ens-namehash": "^2.0.8", + "solc": "^0.4.20", + "testrpc": "0.0.1", + "web3-utils": "^1.0.0-beta.31" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "camelcase": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-3.0.0.tgz", + "integrity": "sha1-MvxLn82vhF/N9+c7uXysImHwqwo=", + "dev": true + }, + "cliui": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", + "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=", + "dev": true, + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wrap-ansi": "^2.0.0" + } + }, + "fs-extra": { + "version": "0.30.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz", + "integrity": "sha1-8jP/zAjU2n1DLapEl3aYnbHfk/A=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^2.1.0", + "klaw": "^1.0.0", + "path-is-absolute": "^1.0.0", + "rimraf": "^2.2.8" + } + }, + "get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "jsonfile": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6" + } + }, + "require-from-string": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-1.2.1.tgz", + "integrity": "sha1-UpyczvJzgK3+yaL5ZbZJu+5jZBg=", + "dev": true + }, + "require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "solc": { + "version": "0.4.26", + "resolved": "https://registry.npmjs.org/solc/-/solc-0.4.26.tgz", + "integrity": "sha512-o+c6FpkiHd+HPjmjEVpQgH7fqZ14tJpXhho+/bQXlXbliLIS/xjXb42Vxh+qQY1WCSTMQ0+a5vR9vi0MfhU6mA==", + "dev": true, + "requires": { + "fs-extra": "^0.30.0", + "memorystream": "^0.3.1", + "require-from-string": "^1.1.0", + "semver": "^5.3.0", + "yargs": "^4.7.1" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + 
"strip-ansi": "^3.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "which-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-1.0.0.tgz", + "integrity": "sha1-u6Y8qGGUiZT/MHc2CJ47lgJsKk8=", + "dev": true + }, + "wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "dev": true, + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" + } + }, + "y18n": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.2.tgz", + "integrity": "sha512-uGZHXkHnhF0XeeAPgnKfPv1bgKAYyVvmNL1xlKsPYZPaIHxGti2hHqvOCQv71XMsLxu1QjergkqogUnms5D3YQ==", + "dev": true + }, + "yargs": { + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-4.8.1.tgz", + "integrity": "sha1-wMQpJMpKqmsObaFznfshZDn53cA=", + "dev": true, + "requires": { + "cliui": "^3.2.0", + "decamelize": "^1.1.1", + "get-caller-file": "^1.0.1", + "lodash.assign": "^4.0.3", + "os-locale": "^1.4.0", + "read-pkg-up": "^1.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^1.0.1", + "set-blocking": "^2.0.0", + "string-width": "^1.0.1", + "which-module": "^1.0.0", + "window-size": "^0.2.0", + "y18n": "^3.2.1", + "yargs-parser": "^2.4.1" + } + }, + "yargs-parser": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-2.4.1.tgz", + "integrity": "sha1-hVaN488VD/SfpRgl8DqMiA3cxcQ=", + "dev": true, + "requires": { + "camelcase": "^3.0.0", + "lodash.assign": "^4.0.6" + } + } + } + }, + "@ensdomains/resolver": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@ensdomains/resolver/-/resolver-0.2.4.tgz", + "integrity": "sha512-bvaTH34PMCbv6anRa9I/0zjLJgY4EuznbEMgbV77JBCQ9KNC46rzi0avuxpOfu+xDjPEtSFGqVEOr5GlUSGudA==", + "dev": true + }, + "@ethereum-waffle/chai": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/chai/-/chai-3.4.1.tgz", + "integrity": "sha512-8mjgjWCe8XSCWuyJgVtJY8sm00VTczGBTDxBejgEBWN/J9x7QD8jdmWW8bfxdnqZbxiDCTvRFL58Wmd254BEqQ==", + "dev": true, + "requires": { + "@ethereum-waffle/provider": "^3.4.0", + "ethers": "^5.4.7" + } + }, + "@ethereum-waffle/compiler": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/compiler/-/compiler-3.4.0.tgz", + "integrity": "sha512-a2wxGOoB9F1QFRE+Om7Cz2wn+pxM/o7a0a6cbwhaS2lECJgFzeN9xEkVrKahRkF4gEfXGcuORg4msP0Asxezlw==", + "dev": true, + "requires": { + "@resolver-engine/imports": "^0.3.3", + "@resolver-engine/imports-fs": "^0.3.3", + "@typechain/ethers-v5": "^2.0.0", + "@types/mkdirp": "^0.5.2", + "@types/node-fetch": "^2.5.5", + "ethers": "^5.0.1", + "mkdirp": "^0.5.1", + "node-fetch": "^2.6.1", + "solc": "^0.6.3", + "ts-generator": "^0.1.1", + "typechain": "^3.0.0" + } + }, + "@ethereum-waffle/ens": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/ens/-/ens-3.3.1.tgz", + "integrity": "sha512-xSjNWnT2Iwii3J3XGqD+F5yLEOzQzLHNLGfI5KIXdtQ4FHgReW/AMGRgPPLi+n+SP08oEQWJ3sEKrvbFlwJuaA==", + "dev": true, + "requires": { + "@ensdomains/ens": "^0.4.4", + "@ensdomains/resolver": "^0.2.4", + "ethers": "^5.5.2" + } + }, + "@ethereum-waffle/mock-contract": { + "version": "3.3.1", + "resolved": 
"https://registry.npmjs.org/@ethereum-waffle/mock-contract/-/mock-contract-3.3.1.tgz", + "integrity": "sha512-h9yChF7IkpJLODg/o9/jlwKwTcXJLSEIq3gewgwUJuBHnhPkJGekcZvsTbximYc+e42QUZrDUATSuTCIryeCEA==", + "dev": true, + "requires": { + "@ethersproject/abi": "^5.5.0", + "ethers": "^5.5.2" + } + }, + "@ethereum-waffle/provider": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@ethereum-waffle/provider/-/provider-3.4.1.tgz", + "integrity": "sha512-5iDte7c9g9N1rTRE/P4npwk1Hus/wA2yH850X6sP30mr1IrwSG9NKn6/2SOQkAVJnh9jqyLVg2X9xCODWL8G4A==", + "dev": true, + "requires": { + "@ethereum-waffle/ens": "^3.3.1", + "ethers": "^5.5.2", + "ganache-core": "^2.13.2", + "patch-package": "^6.2.2", + "postinstall-postinstall": "^2.1.0" + } + }, + "@ethereumjs/block": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/block/-/block-3.6.0.tgz", + "integrity": "sha512-dqLo1LtsLG+Oelu5S5tWUDG0pah3QUwV5TJZy2cm19BXDr4ka/S9XBSgao0i09gTcuPlovlHgcs6d7EZ37urjQ==", + "dev": true, + "requires": { + "@ethereumjs/common": "^2.6.0", + "@ethereumjs/tx": "^3.4.0", + "ethereumjs-util": "^7.1.3", + "merkle-patricia-tree": "^4.2.2" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + } + }, + "level-ws": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/level-ws/-/level-ws-2.0.0.tgz", + "integrity": "sha512-1iv7VXx0G9ec1isqQZ7y5LmoZo/ewAsyDHNA8EFDW5hqH2Kqovm33nSFkSdnLLAK+I5FlT+lo5Cw9itGe+CpQA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "readable-stream": "^3.1.0", + "xtend": "^4.0.1" + } + }, + "merkle-patricia-tree": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/merkle-patricia-tree/-/merkle-patricia-tree-4.2.2.tgz", + "integrity": "sha512-eqZYNTshcYx9aESkSPr71EqwsR/QmpnObDEV4iLxkt/x/IoLYZYjJvKY72voP/27Vy61iMOrfOG6jrn7ttXD+Q==", + "dev": true, + "requires": { + "@types/levelup": "^4.3.0", + "ethereumjs-util": "^7.1.2", + "level-mem": "^5.0.1", + "level-ws": "^2.0.0", + "readable-stream": "^3.6.0", + "rlp": "^2.2.4", + "semaphore-async-await": "^1.5.1" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "@ethereumjs/blockchain": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/@ethereumjs/blockchain/-/blockchain-5.5.1.tgz", + "integrity": "sha512-JS2jeKxl3tlaa5oXrZ8mGoVBCz6YqsGG350XVNtHAtNZXKk7pU3rH4xzF2ru42fksMMqzFLzKh9l4EQzmNWDqA==", + "dev": true, + "requires": { + "@ethereumjs/block": "^3.6.0", + "@ethereumjs/common": "^2.6.0", + "@ethereumjs/ethash": "^1.1.0", + "debug": "^2.2.0", + "ethereumjs-util": "^7.1.3", + "level-mem": "^5.0.1", + "lru-cache": "^5.1.1", + 
"semaphore-async-await": "^1.5.1" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + } + } + }, + "@ethereumjs/common": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/common/-/common-2.6.0.tgz", + "integrity": "sha512-Cq2qS0FTu6O2VU1sgg+WyU9Ps0M6j/BEMHN+hRaECXCV/r0aI78u4N6p52QW/BDVhwWZpCdrvG8X7NJdzlpNUA==", + "dev": true, + "requires": { + "crc-32": "^1.2.0", + "ethereumjs-util": "^7.1.3" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + } + } + } + }, + "@ethereumjs/ethash": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/ethash/-/ethash-1.1.0.tgz", + "integrity": "sha512-/U7UOKW6BzpA+Vt+kISAoeDie1vAvY4Zy2KF5JJb+So7+1yKmJeJEHOGSnQIj330e9Zyl3L5Nae6VZyh2TJnAA==", + "dev": true, + "requires": { + "@ethereumjs/block": "^3.5.0", + "@types/levelup": "^4.3.0", + "buffer-xor": "^2.0.1", + "ethereumjs-util": "^7.1.1", + "miller-rabin": "^4.0.0" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "buffer-xor": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-2.0.2.tgz", + "integrity": "sha512-eHslX0bin3GB+Lx2p7lEYRShRewuNZL3fUl4qlVJGGiwoPGftmt8JQgk2Y9Ji5/01TnVDo33E5b5O3vUB1HdqQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": 
"^2.2.4" + } + } + } + }, + "@ethereumjs/tx": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/tx/-/tx-3.4.0.tgz", + "integrity": "sha512-WWUwg1PdjHKZZxPPo274ZuPsJCWV3SqATrEKQP1n2DrVYVP1aZIYpo/mFaA0BDoE0tIQmBeimRCEA0Lgil+yYw==", + "dev": true, + "requires": { + "@ethereumjs/common": "^2.6.0", + "ethereumjs-util": "^7.1.3" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + } + } + } + }, + "@ethereumjs/vm": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/vm/-/vm-5.6.0.tgz", + "integrity": "sha512-J2m/OgjjiGdWF2P9bj/4LnZQ1zRoZhY8mRNVw/N3tXliGI8ai1sI1mlDPkLpeUUM4vq54gH6n0ZlSpz8U/qlYQ==", + "dev": true, + "requires": { + "@ethereumjs/block": "^3.6.0", + "@ethereumjs/blockchain": "^5.5.0", + "@ethereumjs/common": "^2.6.0", + "@ethereumjs/tx": "^3.4.0", + "async-eventemitter": "^0.2.4", + "core-js-pure": "^3.0.1", + "debug": "^2.2.0", + "ethereumjs-util": "^7.1.3", + "functional-red-black-tree": "^1.0.1", + "mcl-wasm": "^0.7.1", + "merkle-patricia-tree": "^4.2.2", + "rustbn.js": "~0.2.0" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + } + }, + "level-ws": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/level-ws/-/level-ws-2.0.0.tgz", + "integrity": "sha512-1iv7VXx0G9ec1isqQZ7y5LmoZo/ewAsyDHNA8EFDW5hqH2Kqovm33nSFkSdnLLAK+I5FlT+lo5Cw9itGe+CpQA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "readable-stream": "^3.1.0", + "xtend": "^4.0.1" + } + }, + "merkle-patricia-tree": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/merkle-patricia-tree/-/merkle-patricia-tree-4.2.2.tgz", + "integrity": "sha512-eqZYNTshcYx9aESkSPr71EqwsR/QmpnObDEV4iLxkt/x/IoLYZYjJvKY72voP/27Vy61iMOrfOG6jrn7ttXD+Q==", + "dev": true, + "requires": { + "@types/levelup": "^4.3.0", + "ethereumjs-util": "^7.1.2", + "level-mem": "^5.0.1", + "level-ws": "^2.0.0", + "readable-stream": "^3.6.0", + "rlp": "^2.2.4", + "semaphore-async-await": "^1.5.1" + } + }, + "ms": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "@ethersproject/abi": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/abi/-/abi-5.5.0.tgz", + "integrity": "sha512-loW7I4AohP5KycATvc0MgujU6JyCHPqHdeoo9z3Nr9xEiNioxa65ccdm1+fsoJhkuhdRtfcL8cfyGamz2AxZ5w==", + "dev": true, + "requires": { + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/hash": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "@ethersproject/abstract-provider": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/@ethersproject/abstract-provider/-/abstract-provider-5.5.1.tgz", + "integrity": "sha512-m+MA/ful6eKbxpr99xUYeRvLkfnlqzrF8SZ46d/xFB1A7ZVknYc/sXJG0RcufF52Qn2jeFj1hhcoQ7IXjNKUqg==", + "dev": true, + "requires": { + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/networks": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "@ethersproject/web": "^5.5.0" + } + }, + "@ethersproject/abstract-signer": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/abstract-signer/-/abstract-signer-5.5.0.tgz", + "integrity": "sha512-lj//7r250MXVLKI7sVarXAbZXbv9P50lgmJQGr2/is82EwEb8r7HrxsmMqAjTsztMYy7ohrIhGMIml+Gx4D3mA==", + "dev": true, + "requires": { + "@ethersproject/abstract-provider": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0" + } + }, + "@ethersproject/address": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/address/-/address-5.5.0.tgz", + "integrity": "sha512-l4Nj0eWlTUh6ro5IbPTgbpT4wRbdH5l8CQf7icF7sb/SI3Nhd9Y9HzhonTSTi6CefI0necIw7LJqQPopPLZyWw==", + "dev": true, + "requires": { + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/rlp": "^5.5.0" + } + }, + "@ethersproject/base64": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/base64/-/base64-5.5.0.tgz", + "integrity": "sha512-tdayUKhU1ljrlHzEWbStXazDpsx4eg1dBXUSI6+mHlYklOXoXF6lZvw8tnD6oVaWfnMxAgRSKROg3cVKtCcppA==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0" + } + }, + "@ethersproject/basex": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/basex/-/basex-5.5.0.tgz", + "integrity": "sha512-ZIodwhHpVJ0Y3hUCfUucmxKsWQA5TMnavp5j/UOuDdzZWzJlRmuOjcTMIGgHCYuZmHt36BfiSyQPSRskPxbfaQ==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/properties": "^5.5.0" + } + }, + "@ethersproject/bignumber": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/bignumber/-/bignumber-5.5.0.tgz", + 
"integrity": "sha512-6Xytlwvy6Rn3U3gKEc1vP7nR92frHkv6wtVr95LFR3jREXiCPzdWxKQ1cx4JGQBXxcguAwjA8murlYN2TSiEbg==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "bn.js": "^4.11.9" + } + }, + "@ethersproject/bytes": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/bytes/-/bytes-5.5.0.tgz", + "integrity": "sha512-ABvc7BHWhZU9PNM/tANm/Qx4ostPGadAuQzWTr3doklZOhDlmcBqclrQe/ZXUIj3K8wC28oYeuRa+A37tX9kog==", + "dev": true, + "requires": { + "@ethersproject/logger": "^5.5.0" + } + }, + "@ethersproject/constants": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/constants/-/constants-5.5.0.tgz", + "integrity": "sha512-2MsRRVChkvMWR+GyMGY4N1sAX9Mt3J9KykCsgUFd/1mwS0UH1qw+Bv9k1UJb3X3YJYFco9H20pjSlOIfCG5HYQ==", + "dev": true, + "requires": { + "@ethersproject/bignumber": "^5.5.0" + } + }, + "@ethersproject/contracts": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/contracts/-/contracts-5.5.0.tgz", + "integrity": "sha512-2viY7NzyvJkh+Ug17v7g3/IJC8HqZBDcOjYARZLdzRxrfGlRgmYgl6xPRKVbEzy1dWKw/iv7chDcS83pg6cLxg==", + "dev": true, + "requires": { + "@ethersproject/abi": "^5.5.0", + "@ethersproject/abstract-provider": "^5.5.0", + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/transactions": "^5.5.0" + } + }, + "@ethersproject/hash": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/hash/-/hash-5.5.0.tgz", + "integrity": "sha512-dnGVpK1WtBjmnp3mUT0PlU2MpapnwWI0PibldQEq1408tQBAbZpPidkWoVVuNMOl/lISO3+4hXZWCL3YV7qzfg==", + "dev": true, + "requires": { + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "@ethersproject/hdnode": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/hdnode/-/hdnode-5.5.0.tgz", + "integrity": "sha512-mcSOo9zeUg1L0CoJH7zmxwUG5ggQHU1UrRf8jyTYy6HxdZV+r0PBoL1bxr+JHIPXRzS6u/UW4mEn43y0tmyF8Q==", + "dev": true, + "requires": { + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/basex": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/pbkdf2": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/sha2": "^5.5.0", + "@ethersproject/signing-key": "^5.5.0", + "@ethersproject/strings": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "@ethersproject/wordlists": "^5.5.0" + } + }, + "@ethersproject/json-wallets": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/json-wallets/-/json-wallets-5.5.0.tgz", + "integrity": "sha512-9lA21XQnCdcS72xlBn1jfQdj2A1VUxZzOzi9UkNdnokNKke/9Ya2xA9aIK1SC3PQyBDLt4C+dfps7ULpkvKikQ==", + "dev": true, + "requires": { + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/hdnode": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/pbkdf2": "^5.5.0", + 
"@ethersproject/properties": "^5.5.0", + "@ethersproject/random": "^5.5.0", + "@ethersproject/strings": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "aes-js": "3.0.0", + "scrypt-js": "3.0.1" + }, + "dependencies": { + "aes-js": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-3.0.0.tgz", + "integrity": "sha1-4h3xCtbCBTKVvLuNq0Cwnb6ofk0=", + "dev": true + } + } + }, + "@ethersproject/keccak256": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/keccak256/-/keccak256-5.5.0.tgz", + "integrity": "sha512-5VoFCTjo2rYbBe1l2f4mccaRFN/4VQEYFwwn04aJV2h7qf4ZvI2wFxUE1XOX+snbwCLRzIeikOqtAoPwMza9kg==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "js-sha3": "0.8.0" + } + }, + "@ethersproject/logger": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/logger/-/logger-5.5.0.tgz", + "integrity": "sha512-rIY/6WPm7T8n3qS2vuHTUBPdXHl+rGxWxW5okDfo9J4Z0+gRRZT0msvUdIJkE4/HS29GUMziwGaaKO2bWONBrg==", + "dev": true + }, + "@ethersproject/networks": { + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/@ethersproject/networks/-/networks-5.5.2.tgz", + "integrity": "sha512-NEqPxbGBfy6O3x4ZTISb90SjEDkWYDUbEeIFhJly0F7sZjoQMnj5KYzMSkMkLKZ+1fGpx00EDpHQCy6PrDupkQ==", + "dev": true, + "requires": { + "@ethersproject/logger": "^5.5.0" + } + }, + "@ethersproject/pbkdf2": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/pbkdf2/-/pbkdf2-5.5.0.tgz", + "integrity": "sha512-SaDvQFvXPnz1QGpzr6/HToLifftSXGoXrbpZ6BvoZhmx4bNLHrxDe8MZisuecyOziP1aVEwzC2Hasj+86TgWVg==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/sha2": "^5.5.0" + } + }, + "@ethersproject/properties": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/properties/-/properties-5.5.0.tgz", + "integrity": "sha512-l3zRQg3JkD8EL3CPjNK5g7kMx4qSwiR60/uk5IVjd3oq1MZR5qUg40CNOoEJoX5wc3DyY5bt9EbMk86C7x0DNA==", + "dev": true, + "requires": { + "@ethersproject/logger": "^5.5.0" + } + }, + "@ethersproject/providers": { + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/@ethersproject/providers/-/providers-5.5.2.tgz", + "integrity": "sha512-hkbx7x/MKcRjyrO4StKXCzCpWer6s97xnm34xkfPiarhtEUVAN4TBBpamM+z66WcTt7H5B53YwbRj1n7i8pZoQ==", + "dev": true, + "requires": { + "@ethersproject/abstract-provider": "^5.5.0", + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/basex": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/hash": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/networks": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/random": "^5.5.0", + "@ethersproject/rlp": "^5.5.0", + "@ethersproject/sha2": "^5.5.0", + "@ethersproject/strings": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "@ethersproject/web": "^5.5.0", + "bech32": "1.1.4", + "ws": "7.4.6" + }, + "dependencies": { + "ws": { + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", + "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==", + "dev": true, + "requires": {} + } + } + }, + "@ethersproject/random": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/@ethersproject/random/-/random-5.5.1.tgz", + "integrity": 
"sha512-YaU2dQ7DuhL5Au7KbcQLHxcRHfgyNgvFV4sQOo0HrtW3Zkrc9ctWNz8wXQ4uCSfSDsqX2vcjhroxU5RQRV0nqA==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0" + } + }, + "@ethersproject/rlp": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/rlp/-/rlp-5.5.0.tgz", + "integrity": "sha512-hLv8XaQ8PTI9g2RHoQGf/WSxBfTB/NudRacbzdxmst5VHAqd1sMibWG7SENzT5Dj3yZ3kJYx+WiRYEcQTAkcYA==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0" + } + }, + "@ethersproject/sha2": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/sha2/-/sha2-5.5.0.tgz", + "integrity": "sha512-B5UBoglbCiHamRVPLA110J+2uqsifpZaTmid2/7W5rbtYVz6gus6/hSDieIU/6gaKIDcOj12WnOdiymEUHIAOA==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "hash.js": "1.1.7" + } + }, + "@ethersproject/signing-key": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/signing-key/-/signing-key-5.5.0.tgz", + "integrity": "sha512-5VmseH7qjtNmDdZBswavhotYbWB0bOwKIlOTSlX14rKn5c11QmJwGt4GHeo7NrL/Ycl7uo9AHvEqs5xZgFBTng==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "bn.js": "^4.11.9", + "elliptic": "6.5.4", + "hash.js": "1.1.7" + } + }, + "@ethersproject/solidity": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/solidity/-/solidity-5.5.0.tgz", + "integrity": "sha512-9NgZs9LhGMj6aCtHXhtmFQ4AN4sth5HuFXVvAQtzmm0jpSCNOTGtrHZJAeYTh7MBjRR8brylWZxBZR9zDStXbw==", + "dev": true, + "requires": { + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/sha2": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "@ethersproject/strings": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/strings/-/strings-5.5.0.tgz", + "integrity": "sha512-9fy3TtF5LrX/wTrBaT8FGE6TDJyVjOvXynXJz5MT5azq+E6D92zuKNx7i29sWW2FjVOaWjAsiZ1ZWznuduTIIQ==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/logger": "^5.5.0" + } + }, + "@ethersproject/transactions": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/transactions/-/transactions-5.5.0.tgz", + "integrity": "sha512-9RZYSKX26KfzEd/1eqvv8pLauCKzDTub0Ko4LfIgaERvRuwyaNV78mJs7cpIgZaDl6RJui4o49lHwwCM0526zA==", + "dev": true, + "requires": { + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/rlp": "^5.5.0", + "@ethersproject/signing-key": "^5.5.0" + } + }, + "@ethersproject/units": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/units/-/units-5.5.0.tgz", + "integrity": "sha512-7+DpjiZk4v6wrikj+TCyWWa9dXLNU73tSTa7n0TSJDxkYbV3Yf1eRh9ToMLlZtuctNYu9RDNNy2USq3AdqSbag==", + "dev": true, + "requires": { + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/constants": "^5.5.0", + "@ethersproject/logger": "^5.5.0" + } + }, + "@ethersproject/wallet": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/wallet/-/wallet-5.5.0.tgz", + 
"integrity": "sha512-Mlu13hIctSYaZmUOo7r2PhNSd8eaMPVXe1wxrz4w4FCE4tDYBywDH+bAR1Xz2ADyXGwqYMwstzTrtUVIsKDO0Q==", + "dev": true, + "requires": { + "@ethersproject/abstract-provider": "^5.5.0", + "@ethersproject/abstract-signer": "^5.5.0", + "@ethersproject/address": "^5.5.0", + "@ethersproject/bignumber": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/hash": "^5.5.0", + "@ethersproject/hdnode": "^5.5.0", + "@ethersproject/json-wallets": "^5.5.0", + "@ethersproject/keccak256": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/random": "^5.5.0", + "@ethersproject/signing-key": "^5.5.0", + "@ethersproject/transactions": "^5.5.0", + "@ethersproject/wordlists": "^5.5.0" + } + }, + "@ethersproject/web": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/@ethersproject/web/-/web-5.5.1.tgz", + "integrity": "sha512-olvLvc1CB12sREc1ROPSHTdFCdvMh0J5GSJYiQg2D0hdD4QmJDy8QYDb1CvoqD/bF1c++aeKv2sR5uduuG9dQg==", + "dev": true, + "requires": { + "@ethersproject/base64": "^5.5.0", + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "@ethersproject/wordlists": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@ethersproject/wordlists/-/wordlists-5.5.0.tgz", + "integrity": "sha512-bL0UTReWDiaQJJYOC9sh/XcRu/9i2jMrzf8VLRmPKx58ckSlOJiohODkECCO50dtLZHcGU6MLXQ4OOrgBwP77Q==", + "dev": true, + "requires": { + "@ethersproject/bytes": "^5.5.0", + "@ethersproject/hash": "^5.5.0", + "@ethersproject/logger": "^5.5.0", + "@ethersproject/properties": "^5.5.0", + "@ethersproject/strings": "^5.5.0" + } + }, + "@nomiclabs/hardhat-ethers": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@nomiclabs/hardhat-ethers/-/hardhat-ethers-2.0.4.tgz", + "integrity": "sha512-7LMR344TkdCYkMVF9LuC9VU2NBIi84akQiwqm7OufpWaDgHbWhuanY53rk3SVAW0E4HBk5xn5wl5+bN5f+Mq5w==", + "dev": true, + "requires": {} + }, + "@nomiclabs/hardhat-etherscan": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/@nomiclabs/hardhat-etherscan/-/hardhat-etherscan-2.1.8.tgz", + "integrity": "sha512-0+rj0SsZotVOcTLyDOxnOc3Gulo8upo0rsw/h+gBPcmtj91YqYJNhdARHoBxOhhE8z+5IUQPx+Dii04lXT14PA==", + "dev": true, + "requires": { + "@ethersproject/abi": "^5.1.2", + "@ethersproject/address": "^5.0.2", + "cbor": "^5.0.2", + "debug": "^4.1.1", + "fs-extra": "^7.0.1", + "node-fetch": "^2.6.0", + "semver": "^6.3.0" + } + }, + "@nomiclabs/hardhat-waffle": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@nomiclabs/hardhat-waffle/-/hardhat-waffle-2.0.1.tgz", + "integrity": "sha512-2YR2V5zTiztSH9n8BYWgtv3Q+EL0N5Ltm1PAr5z20uAY4SkkfylJ98CIqt18XFvxTD5x4K2wKBzddjV9ViDAZQ==", + "dev": true, + "requires": { + "@types/sinon-chai": "^3.2.3", + "@types/web3": "1.0.19" + } + }, + "@openzeppelin/contracts": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/@openzeppelin/contracts/-/contracts-3.4.2.tgz", + "integrity": "sha512-z0zMCjyhhp4y7XKAcDAi3Vgms4T2PstwBdahiO0+9NaGICQKjynK3wduSRplTgk4LXmoO1yfDGO5RbjKYxtuxA==", + "dev": true + }, + "@resolver-engine/core": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@resolver-engine/core/-/core-0.3.3.tgz", + "integrity": "sha512-eB8nEbKDJJBi5p5SrvrvILn4a0h42bKtbCTri3ZxCGt6UvoQyp7HnGOfki944bUjBSHKK3RvgfViHn+kqdXtnQ==", + "dev": true, + "requires": { + "debug": "^3.1.0", + "is-url": "^1.2.4", + "request": "^2.85.0" + }, + "dependencies": { + "debug": { + "version": 
"3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "@resolver-engine/fs": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@resolver-engine/fs/-/fs-0.3.3.tgz", + "integrity": "sha512-wQ9RhPUcny02Wm0IuJwYMyAG8fXVeKdmhm8xizNByD4ryZlx6PP6kRen+t/haF43cMfmaV7T3Cx6ChOdHEhFUQ==", + "dev": true, + "requires": { + "@resolver-engine/core": "^0.3.3", + "debug": "^3.1.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "@resolver-engine/imports": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@resolver-engine/imports/-/imports-0.3.3.tgz", + "integrity": "sha512-anHpS4wN4sRMwsAbMXhMfOD/y4a4Oo0Cw/5+rue7hSwGWsDOQaAU1ClK1OxjUC35/peazxEl8JaSRRS+Xb8t3Q==", + "dev": true, + "requires": { + "@resolver-engine/core": "^0.3.3", + "debug": "^3.1.0", + "hosted-git-info": "^2.6.0", + "path-browserify": "^1.0.0", + "url": "^0.11.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "@resolver-engine/imports-fs": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@resolver-engine/imports-fs/-/imports-fs-0.3.3.tgz", + "integrity": "sha512-7Pjg/ZAZtxpeyCFlZR5zqYkz+Wdo84ugB5LApwriT8XFeQoLwGUj4tZFFvvCuxaNCcqZzCYbonJgmGObYBzyCA==", + "dev": true, + "requires": { + "@resolver-engine/fs": "^0.3.3", + "@resolver-engine/imports": "^0.3.3", + "debug": "^3.1.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "@sentry/core": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-5.30.0.tgz", + "integrity": "sha512-TmfrII8w1PQZSZgPpUESqjB+jC6MvZJZdLtE/0hZ+SrnKhW3x5WlYLvTXZpcWePYBku7rl2wn1RZu6uT0qCTeg==", + "dev": true, + "requires": { + "@sentry/hub": "5.30.0", + "@sentry/minimal": "5.30.0", + "@sentry/types": "5.30.0", + "@sentry/utils": "5.30.0", + "tslib": "^1.9.3" + } + }, + "@sentry/hub": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/hub/-/hub-5.30.0.tgz", + "integrity": "sha512-2tYrGnzb1gKz2EkMDQcfLrDTvmGcQPuWxLnJKXJvYTQDGLlEvi2tWz1VIHjunmOvJrB5aIQLhm+dcMRwFZDCqQ==", + "dev": true, + "requires": { + "@sentry/types": "5.30.0", + "@sentry/utils": "5.30.0", + "tslib": "^1.9.3" + } + }, + "@sentry/minimal": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/minimal/-/minimal-5.30.0.tgz", + "integrity": "sha512-BwWb/owZKtkDX+Sc4zCSTNcvZUq7YcH3uAVlmh/gtR9rmUvbzAA3ewLuB3myi4wWRAMEtny6+J/FN/x+2wn9Xw==", + "dev": true, + "requires": { + "@sentry/hub": "5.30.0", + "@sentry/types": "5.30.0", + "tslib": "^1.9.3" + } + }, + "@sentry/node": { + "version": "5.30.0", + "resolved": 
"https://registry.npmjs.org/@sentry/node/-/node-5.30.0.tgz", + "integrity": "sha512-Br5oyVBF0fZo6ZS9bxbJZG4ApAjRqAnqFFurMVJJdunNb80brh7a5Qva2kjhm+U6r9NJAB5OmDyPkA1Qnt+QVg==", + "dev": true, + "requires": { + "@sentry/core": "5.30.0", + "@sentry/hub": "5.30.0", + "@sentry/tracing": "5.30.0", + "@sentry/types": "5.30.0", + "@sentry/utils": "5.30.0", + "cookie": "^0.4.1", + "https-proxy-agent": "^5.0.0", + "lru_map": "^0.3.3", + "tslib": "^1.9.3" + } + }, + "@sentry/tracing": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/tracing/-/tracing-5.30.0.tgz", + "integrity": "sha512-dUFowCr0AIMwiLD7Fs314Mdzcug+gBVo/+NCMyDw8tFxJkwWAKl7Qa2OZxLQ0ZHjakcj1hNKfCQJ9rhyfOl4Aw==", + "dev": true, + "requires": { + "@sentry/hub": "5.30.0", + "@sentry/minimal": "5.30.0", + "@sentry/types": "5.30.0", + "@sentry/utils": "5.30.0", + "tslib": "^1.9.3" + } + }, + "@sentry/types": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-5.30.0.tgz", + "integrity": "sha512-R8xOqlSTZ+htqrfteCWU5Nk0CDN5ApUTvrlvBuiH1DyP6czDZ4ktbZB0hAgBlVcK0U+qpD3ag3Tqqpa5Q67rPw==", + "dev": true + }, + "@sentry/utils": { + "version": "5.30.0", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-5.30.0.tgz", + "integrity": "sha512-zaYmoH0NWWtvnJjC9/CBseXMtKHm/tm40sz3YfJRxeQjyzRqNQPgivpd9R/oDJCYj999mzdW382p/qi2ypjLww==", + "dev": true, + "requires": { + "@sentry/types": "5.30.0", + "tslib": "^1.9.3" + } + }, + "@sinonjs/commons": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", + "integrity": "sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==", + "dev": true, + "requires": { + "type-detect": "4.0.8" + } + }, + "@sinonjs/fake-timers": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-7.1.2.tgz", + "integrity": "sha512-iQADsW4LBMISqZ6Ci1dupJL9pprqwcVFTcOsEmQOEhW+KLCVn/Y4Jrvg2k19fIHCp+iFprriYPTdRcQR8NbUPg==", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.7.0" + } + }, + "@solidity-parser/parser": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@solidity-parser/parser/-/parser-0.14.0.tgz", + "integrity": "sha512-cX0JJRcmPtNUJpzD2K7FdA7qQsTOk1UZnFx2k7qAg9ZRvuaH5NBe5IEdBMXGlmf2+FmjhqbygJ26H8l2SV7aKQ==", + "dev": true, + "requires": { + "antlr4ts": "^0.5.0-alpha.4" + } + }, + "@truffle/hdwallet-provider": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@truffle/hdwallet-provider/-/hdwallet-provider-1.4.3.tgz", + "integrity": "sha512-Oo8ORAQLfcbLYp6HwG1mpOx6IpVkHv8IkKy25LZUN5Q5bCCqxdlMF0F7CnSXPBdQ+UqZY9+RthC0VrXv9gXiPQ==", + "requires": { + "@trufflesuite/web3-provider-engine": "15.0.13-1", + "ethereum-cryptography": "^0.1.3", + "ethereum-protocol": "^1.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.2", + "ethereumjs-util": "^6.1.0", + "ethereumjs-wallet": "^1.0.1" + } + }, + "@trufflesuite/eth-json-rpc-filters": { + "version": "4.1.2-1", + "resolved": "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-filters/-/eth-json-rpc-filters-4.1.2-1.tgz", + "integrity": "sha512-/MChvC5dw2ck9NU1cZmdovCz2VKbOeIyR4tcxDvA5sT+NaL0rA2/R5U0yI7zsbo1zD+pgqav77rQHTzpUdDNJQ==", + "requires": { + "@trufflesuite/eth-json-rpc-middleware": "^4.4.2-0", + "await-semaphore": "^0.1.3", + "eth-query": "^2.1.2", + "json-rpc-engine": "^5.1.3", + "lodash.flatmap": "^4.5.0", + "safe-event-emitter": "^1.0.1" + } + }, + "@trufflesuite/eth-json-rpc-infura": { + "version": "4.0.3-0", + 
"resolved": "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-infura/-/eth-json-rpc-infura-4.0.3-0.tgz", + "integrity": "sha512-xaUanOmo0YLqRsL0SfXpFienhdw5bpQ1WEXxMTRi57az4lwpZBv4tFUDvcerdwJrxX9wQqNmgUgd1BrR01dumw==", + "requires": { + "@trufflesuite/eth-json-rpc-middleware": "^4.4.2-1", + "cross-fetch": "^2.1.1", + "eth-json-rpc-errors": "^1.0.1", + "json-rpc-engine": "^5.1.3" + }, + "dependencies": { + "eth-json-rpc-errors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/eth-json-rpc-errors/-/eth-json-rpc-errors-1.1.1.tgz", + "integrity": "sha512-WT5shJ5KfNqHi9jOZD+ID8I1kuYWNrigtZat7GOQkvwo99f8SzAVaEcWhJUv656WiZOAg3P1RiJQANtUmDmbIg==", + "requires": { + "fast-safe-stringify": "^2.0.6" + } + } + } + }, + "@trufflesuite/eth-json-rpc-middleware": { + "version": "4.4.2-1", + "resolved": "https://registry.npmjs.org/@trufflesuite/eth-json-rpc-middleware/-/eth-json-rpc-middleware-4.4.2-1.tgz", + "integrity": "sha512-iEy9H8ja7/8aYES5HfrepGBKU9n/Y4OabBJEklVd/zIBlhCCBAWBqkIZgXt11nBXO/rYAeKwYuE3puH3ByYnLA==", + "requires": { + "@trufflesuite/eth-sig-util": "^1.4.2", + "btoa": "^1.2.1", + "clone": "^2.1.1", + "eth-json-rpc-errors": "^1.0.1", + "eth-query": "^2.1.2", + "ethereumjs-block": "^1.6.0", + "ethereumjs-tx": "^1.3.7", + "ethereumjs-util": "^5.1.2", + "ethereumjs-vm": "^2.6.0", + "fetch-ponyfill": "^4.0.0", + "json-rpc-engine": "^5.1.3", + "json-stable-stringify": "^1.0.1", + "pify": "^3.0.0", + "safe-event-emitter": "^1.0.1" + }, + "dependencies": { + "eth-json-rpc-errors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/eth-json-rpc-errors/-/eth-json-rpc-errors-1.1.1.tgz", + "integrity": "sha512-WT5shJ5KfNqHi9jOZD+ID8I1kuYWNrigtZat7GOQkvwo99f8SzAVaEcWhJUv656WiZOAg3P1RiJQANtUmDmbIg==", + "requires": { + "fast-safe-stringify": "^2.0.6" + } + }, + "ethereum-common": { + "version": "0.0.18", + "resolved": "https://registry.npmjs.org/ethereum-common/-/ethereum-common-0.0.18.tgz", + "integrity": "sha1-L9w1dvIykDNYl26znaeDIT/5Uj8=" + }, + "ethereumjs-tx": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-1.3.7.tgz", + "integrity": "sha512-wvLMxzt1RPhAQ9Yi3/HKZTn0FZYpnsmQdbKYfUUpi4j1SEIcbkd9tndVjcPrufY3V7j2IebOpC00Zp2P/Ay2kA==", + "requires": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "@trufflesuite/eth-sig-util": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@trufflesuite/eth-sig-util/-/eth-sig-util-1.4.2.tgz", + "integrity": "sha512-+GyfN6b0LNW77hbQlH3ufZ/1eCON7mMrGym6tdYf7xiNw9Vv3jBO72bmmos1EId2NgBvPMhmYYm6DSLQFTmzrA==", + "requires": { + "ethereumjs-abi": "^0.6.8", + "ethereumjs-util": "^5.1.1" + }, + "dependencies": { + "ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": 
"^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "@trufflesuite/web3-provider-engine": { + "version": "15.0.13-1", + "resolved": "https://registry.npmjs.org/@trufflesuite/web3-provider-engine/-/web3-provider-engine-15.0.13-1.tgz", + "integrity": "sha512-6u3x/iIN5fyj8pib5QTUDmIOUiwAGhaqdSTXdqCu6v9zo2BEwdCqgEJd1uXDh3DBmPRDfiZ/ge8oUPy7LerpHg==", + "requires": { + "@trufflesuite/eth-json-rpc-filters": "^4.1.2-1", + "@trufflesuite/eth-json-rpc-infura": "^4.0.3-0", + "@trufflesuite/eth-json-rpc-middleware": "^4.4.2-1", + "@trufflesuite/eth-sig-util": "^1.4.2", + "async": "^2.5.0", + "backoff": "^2.5.0", + "clone": "^2.0.0", + "cross-fetch": "^2.1.0", + "eth-block-tracker": "^4.4.2", + "eth-json-rpc-errors": "^2.0.2", + "ethereumjs-block": "^1.2.2", + "ethereumjs-tx": "^1.2.0", + "ethereumjs-util": "^5.1.5", + "ethereumjs-vm": "^2.3.4", + "json-stable-stringify": "^1.0.1", + "promise-to-callback": "^1.0.0", + "readable-stream": "^2.2.9", + "request": "^2.85.0", + "semaphore": "^1.0.3", + "ws": "^5.1.1", + "xhr": "^2.2.0", + "xtend": "^4.0.1" + }, + "dependencies": { + "ethereum-common": { + "version": "0.0.18", + "resolved": "https://registry.npmjs.org/ethereum-common/-/ethereum-common-0.0.18.tgz", + "integrity": "sha1-L9w1dvIykDNYl26znaeDIT/5Uj8=" + }, + "ethereumjs-tx": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-1.3.7.tgz", + "integrity": "sha512-wvLMxzt1RPhAQ9Yi3/HKZTn0FZYpnsmQdbKYfUUpi4j1SEIcbkd9tndVjcPrufY3V7j2IebOpC00Zp2P/Ay2kA==", + "requires": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "@typechain/ethers-v5": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@typechain/ethers-v5/-/ethers-v5-2.0.0.tgz", + "integrity": "sha512-0xdCkyGOzdqh4h5JSf+zoWx85IusEjDcPIwNEHP8mrWSnCae4rvrqB+/gtpdNfX7zjlFlZiMeePn2r63EI3Lrw==", + "dev": true, + "requires": { + "ethers": "^5.0.2" + } + }, + "@types/abstract-leveldown": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/abstract-leveldown/-/abstract-leveldown-7.2.0.tgz", + "integrity": "sha512-q5veSX6zjUy/DlDhR4Y4cU0k2Ar+DT2LUraP00T19WLmTO6Se1djepCCaqU6nQrwcJ5Hyo/CWqxTzrrFg8eqbQ==", + "dev": true + }, + "@types/bn.js": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-5.1.0.tgz", + "integrity": "sha512-QSSVYj7pYFN49kW77o2s9xTCwZ8F2xLbjLLSEVh8D2F4JUhZtPAGOFLTD+ffqksBx/u4cE/KImFjyhqCjn/LIA==", + "requires": { + "@types/node": "*" + } + }, + "@types/chai": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.0.tgz", + "integrity": "sha512-/ceqdqeRraGolFTcfoXNiqjyQhZzbINDngeoAq9GoHa8PPK1yNzTaxWjA6BFWp5Ua9JpXEMSS4s5i9tS0hOJtw==", + "dev": true + }, + "@types/level-errors": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/level-errors/-/level-errors-3.0.0.tgz", + "integrity": "sha512-/lMtoq/Cf/2DVOm6zE6ORyOM+3ZVm/BvzEZVxUhf6bgh8ZHglXlBqxbxSlJeVp8FCbD3IVvk/VbsaNmDjrQvqQ==", + "dev": true + }, + "@types/levelup": { + "version": "4.3.3", + "resolved": 
"https://registry.npmjs.org/@types/levelup/-/levelup-4.3.3.tgz", + "integrity": "sha512-K+OTIjJcZHVlZQN1HmU64VtrC0jC3dXWQozuEIR9zVvltIk90zaGPM2AgT+fIkChpzHhFE3YnvFLCbLtzAmexA==", + "dev": true, + "requires": { + "@types/abstract-leveldown": "*", + "@types/level-errors": "*", + "@types/node": "*" + } + }, + "@types/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@types/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw==", + "dev": true + }, + "@types/mkdirp": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/@types/mkdirp/-/mkdirp-0.5.2.tgz", + "integrity": "sha512-U5icWpv7YnZYGsN4/cmh3WD2onMY0aJIiTE6+51TwJCttdHvtCYmkBNOobHlXwrJRL0nkH9jH4kD+1FAdMN4Tg==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/node": { + "version": "17.0.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.10.tgz", + "integrity": "sha512-S/3xB4KzyFxYGCppyDt68yzBU9ysL88lSdIah4D6cptdcltc4NCPCAMc0+PCpg/lLIyC7IPvj2Z52OJWeIUkog==" + }, + "@types/node-fetch": { + "version": "2.5.12", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.12.tgz", + "integrity": "sha512-MKgC4dlq4kKNa/mYrwpKfzQMB5X3ee5U6fSprkKpToBqBmX4nFZL9cW5jl6sWn+xpRJ7ypWh2yyqqr8UUCstSw==", + "dev": true, + "requires": { + "@types/node": "*", + "form-data": "^3.0.0" + } + }, + "@types/pbkdf2": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/pbkdf2/-/pbkdf2-3.1.0.tgz", + "integrity": "sha512-Cf63Rv7jCQ0LaL8tNXmEyqTHuIJxRdlS5vMh1mj5voN4+QFhVZnlZruezqpWYDiJ8UTzhP0VmeLXCmBk66YrMQ==", + "requires": { + "@types/node": "*" + } + }, + "@types/prettier": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.4.3.tgz", + "integrity": "sha512-QzSuZMBuG5u8HqYz01qtMdg/Jfctlnvj1z/lYnIDXs/golxw0fxtRAHd9KrzjR7Yxz1qVeI00o0kiO3PmVdJ9w==", + "dev": true + }, + "@types/resolve": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz", + "integrity": "sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/secp256k1": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@types/secp256k1/-/secp256k1-4.0.3.tgz", + "integrity": "sha512-Da66lEIFeIz9ltsdMZcpQvmrmmoqrfju8pm1BH8WbYjZSwUgCwXLb9C+9XYogwBITnbsSaMdVPb2ekf7TV+03w==", + "requires": { + "@types/node": "*" + } + }, + "@types/sinon": { + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-10.0.6.tgz", + "integrity": "sha512-6EF+wzMWvBNeGrfP3Nx60hhx+FfwSg1JJBLAAP/IdIUq0EYkqCYf70VT3PhuhPX9eLD+Dp+lNdpb/ZeHG8Yezg==", + "dev": true, + "requires": { + "@sinonjs/fake-timers": "^7.1.0" + } + }, + "@types/sinon-chai": { + "version": "3.2.8", + "resolved": "https://registry.npmjs.org/@types/sinon-chai/-/sinon-chai-3.2.8.tgz", + "integrity": "sha512-d4ImIQbT/rKMG8+AXpmcan5T2/PNeSjrYhvkwet6z0p8kzYtfgA32xzOBlbU0yqJfq+/0Ml805iFoODO0LP5/g==", + "dev": true, + "requires": { + "@types/chai": "*", + "@types/sinon": "*" + } + }, + "@types/underscore": { + "version": "1.11.4", + "resolved": "https://registry.npmjs.org/@types/underscore/-/underscore-1.11.4.tgz", + "integrity": "sha512-uO4CD2ELOjw8tasUrAhvnn2W4A0ZECOvMjCivJr4gA9pGgjv+qxKWY9GLTMVEK8ej85BxQOocUyE7hImmSQYcg==", + "dev": true + }, + "@types/web3": { + "version": "1.0.19", + "resolved": 
"https://registry.npmjs.org/@types/web3/-/web3-1.0.19.tgz", + "integrity": "sha512-fhZ9DyvDYDwHZUp5/STa9XW2re0E8GxoioYJ4pEUZ13YHpApSagixj7IAdoYH5uAK+UalGq6Ml8LYzmgRA/q+A==", + "dev": true, + "requires": { + "@types/bn.js": "*", + "@types/underscore": "*" + } + }, + "@yarnpkg/lockfile": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz", + "integrity": "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==", + "dev": true + }, + "abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dev": true, + "requires": { + "event-target-shim": "^5.0.0" + } + }, + "abstract-leveldown": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.3.0.tgz", + "integrity": "sha512-TU5nlYgta8YrBMNpc9FwQzRbiXsj49gsALsXadbGHt9CROPzX5fB0rWDR5mtdpOOKa5XqRFpbj1QroPAoPzVjQ==", + "dev": true, + "requires": { + "buffer": "^5.5.0", + "immediate": "^3.2.3", + "level-concat-iterator": "~2.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + } + }, + "acorn": { + "version": "6.4.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.2.tgz", + "integrity": "sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==", + "dev": true + }, + "acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "requires": {} + }, + "adm-zip": { + "version": "0.4.16", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.16.tgz", + "integrity": "sha512-TFi4HBKSGfIKsK5YCkKaaFG2m4PEDyViZmEwof3MTIgzimHLto6muaHVpbrljdIvIrFZzEq/p4nafOeLcYegrg==", + "dev": true + }, + "aes-js": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-3.1.2.tgz", + "integrity": "sha512-e5pEa2kBnBOgR4Y/p20pskXI74UEz7de8ZGVo58asOtvSVG5YAbJeELPZxOmt+Bnz3rX753YKhfIn4X4l1PPRQ==" + }, + "agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "requires": { + "debug": "4" + } + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "requires": { + "type-fest": "^0.21.3" + } + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" + } + }, + "antlr4": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/antlr4/-/antlr4-4.7.1.tgz", + "integrity": "sha512-haHyTW7Y9joE5MVs37P2lNYfU2RWBLfcRDD8OWldcdZm5TiCE91B5Xl1oWSwiDUSd4rlExpt2pu1fksYQjRBYQ==", + "dev": true + }, + "antlr4ts": { + "version": "0.5.0-alpha.4", + "resolved": "https://registry.npmjs.org/antlr4ts/-/antlr4ts-0.5.0-alpha.4.tgz", + "integrity": "sha512-WPQDt1B74OfPv/IMS2ekXAKkTZIHl88uMetg6q3OTqgFxZ/dxDXI0EWLyZid/1Pe6hTftyg5N7gel5wNAGxXyQ==", + "dev": true + }, + "anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "array-back": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", + "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", + "dev": true, + "requires": { + "typical": "^2.6.1" + } + }, + "asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "requires": { + "safer-buffer": "~2.1.0" + } + }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + }, + "assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true + }, + "ast-parents": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/ast-parents/-/ast-parents-0.0.1.tgz", + "integrity": "sha1-UI/Q8F0MSHddnszaLhdEIyYejdM=", + "dev": true + }, + "astral-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", + "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", + "dev": true + }, + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "requires": { + "lodash": "^4.17.14" + } + }, + "async-eventemitter": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/async-eventemitter/-/async-eventemitter-0.2.4.tgz", + "integrity": "sha512-pd20BwL7Yt1zwDFy+8MX8F1+WCT8aQeKj0kQnTrH9WaeRETlRamVhD0JtRPmrV4GfOJ2F9CvdQkZeZhnh2TuHw==", + "requires": { + "async": "^2.4.0" + } + }, 
+ "async-limiter": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz", + "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==" + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + }, + "await-semaphore": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/await-semaphore/-/await-semaphore-0.1.3.tgz", + "integrity": "sha512-d1W2aNSYcz/sxYO4pMGX9vq65qOTu0P800epMud+6cYYX0QcT7zyqcxec3VWzpgvdXo57UWmVbZpLMjX2m1I7Q==" + }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + }, + "aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" + }, + "axios": { + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz", + "integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==", + "dev": true, + "requires": { + "follow-redirects": "^1.14.0" + } + }, + "babel-plugin-polyfill-corejs2": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz", + "integrity": "sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==", + "requires": { + "@babel/compat-data": "^7.13.11", + "@babel/helper-define-polyfill-provider": "^0.3.1", + "semver": "^6.1.1" + } + }, + "babel-plugin-polyfill-corejs3": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.1.tgz", + "integrity": "sha512-TihqEe4sQcb/QcPJvxe94/9RZuLQuF1+To4WqQcRvc+3J3gLCPIPgDKzGLG6zmQLfH3nn25heRuDNkS2KR4I8A==", + "requires": { + "@babel/helper-define-polyfill-provider": "^0.3.1", + "core-js-compat": "^3.20.0" + } + }, + "babel-plugin-polyfill-regenerator": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz", + "integrity": "sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==", + "requires": { + "@babel/helper-define-polyfill-provider": "^0.3.1" + } + }, + "backoff": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/backoff/-/backoff-2.5.0.tgz", + "integrity": "sha1-9hbtqdPktmuMp/ynn2lXIsX44m8=", + "requires": { + "precond": "0.2" + } + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "base-x": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/base-x/-/base-x-3.0.9.tgz", + "integrity": "sha512-H7JU6iBHTal1gp56aKoaa//YUxEaAOUiydvrV/pILqIHXTtqxSkATOnDA2u+jZ/61sD+L/412+7kzXRtWukhpQ==", + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true + }, + "bcrypt-pbkdf": { 
+ "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "requires": { + "tweetnacl": "^0.14.3" + }, + "dependencies": { + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + } + } + }, + "bech32": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/bech32/-/bech32-1.1.4.tgz", + "integrity": "sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ==", + "dev": true + }, + "bignumber.js": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.2.tgz", + "integrity": "sha512-GAcQvbpsM0pUb0zw1EI0KhQEZ+lRwR5fYaAp3vPOYuP7aDvGy6cVN6XHLauvF8SOga2y0dcLcjt3iQDTSEliyw==", + "dev": true + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, + "blakejs": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.1.1.tgz", + "integrity": "sha512-bLG6PHOCZJKNshTjGRBvET0vTciwQE6zFKOKKXPDJfwFBd4Ac0yBfPZqcGvGJap50l7ktvlpFqc2jGVaUgbJgg==" + }, + "bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=" + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "browserify-aes": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", + "requires": { + "buffer-xor": "^1.0.3", + "cipher-base": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.3", + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "browserslist": { + "version": "4.19.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", + "integrity": 
"sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==", + "requires": { + "caniuse-lite": "^1.0.30001286", + "electron-to-chromium": "^1.4.17", + "escalade": "^3.1.1", + "node-releases": "^2.0.1", + "picocolors": "^1.0.0" + } + }, + "bs58": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/bs58/-/bs58-4.0.1.tgz", + "integrity": "sha1-vhYedsNU9veIrkBx9j806MTwpCo=", + "requires": { + "base-x": "^3.0.2" + } + }, + "bs58check": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/bs58check/-/bs58check-2.1.2.tgz", + "integrity": "sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA==", + "requires": { + "bs58": "^4.0.0", + "create-hash": "^1.1.0", + "safe-buffer": "^5.1.2" + } + }, + "btoa": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz", + "integrity": "sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g==" + }, + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, + "buffer-xor": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", + "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=" + }, + "bytes": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.1.tgz", + "integrity": "sha512-dWe4nWO/ruEOY7HkUJ5gFt1DCFV9zPRoJr8pV0/ASQermOZjtq8jMjOprC0Kd10GLN+l7xaUPvxzJFWtxGu8Fg==", + "dev": true + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "caller-callsite": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-callsite/-/caller-callsite-2.0.0.tgz", + "integrity": "sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ=", + "dev": true, + "requires": { + "callsites": "^2.0.0" + } + }, + "caller-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-2.0.0.tgz", + "integrity": "sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=", + "dev": true, + "requires": { + "caller-callsite": "^2.0.0" + } + }, + "callsites": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", + "integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "caniuse-lite": { + "version": "1.0.30001300", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001300.tgz", + "integrity": "sha512-cVjiJHWGcNlJi8TZVKNMnvMid3Z3TTdDHmLDzlOdIiZq138Exvo0G+G0wTdVYolxKb4AYwC+38pxodiInVtJSA==" + }, + "caseless": { + "version": "0.12.0", + "resolved": 
"https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + }, + "cbor": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/cbor/-/cbor-5.2.0.tgz", + "integrity": "sha512-5IMhi9e1QU76ppa5/ajP1BmMWZ2FHkhAhjeVKQ/EFCgYSEaeVaoGtL7cxJskf9oCCk+XjzaIdc3IuU/dbA/o2A==", + "dev": true, + "requires": { + "bignumber.js": "^9.0.1", + "nofilter": "^1.0.4" + } + }, + "chai": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.4.tgz", + "integrity": "sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA==", + "dev": true, + "requires": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, + "check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true + }, + "checkpoint-store": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/checkpoint-store/-/checkpoint-store-1.1.0.tgz", + "integrity": "sha1-BOTLUWuRQziTWB5tRgGnjpVS6gY=", + "requires": { + "functional-red-black-tree": "^1.0.1" + } + }, + "chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + } + }, + "ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", + "dev": true + }, + "cipher-base": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "circular": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/circular/-/circular-1.0.5.tgz", + "integrity": "sha1-fad6+Yu96c5LWzWM1Va13e0tMUk=", + "dev": true + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "cli-logger": { + "version": "0.5.40", + "resolved": "https://registry.npmjs.org/cli-logger/-/cli-logger-0.5.40.tgz", + "integrity": "sha1-CX8OEbByx8aYomxH9YiinCC0iws=", + "dev": true, + "requires": { + "circular": "^1.0.5", + "cli-util": "~1.1.27" + } 
+ }, + "cli-regexp": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/cli-regexp/-/cli-regexp-0.1.2.tgz", + "integrity": "sha1-a82TsJ+y7RAl0woRVdWZeVSlNRI=", + "dev": true + }, + "cli-util": { + "version": "1.1.27", + "resolved": "https://registry.npmjs.org/cli-util/-/cli-util-1.1.27.tgz", + "integrity": "sha1-QtaeNqBAoyH8nPhRwVE8rcUJMFQ=", + "dev": true, + "requires": { + "cli-regexp": "~0.1.0" + } + }, + "cli-width": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", + "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==", + "dev": true + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "clone": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=" + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "dev": true + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "command-exists": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz", + "integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==", + "dev": true + }, + "command-line-args": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-4.0.7.tgz", + "integrity": 
"sha512-aUdPvQRAyBvQd2n7jXcsMDz68ckBJELXNzBybCHOibUWEg0mWTnaYCSRU8h9R+aNRSvDihJtssSRCiDRpLaezA==", + "dev": true, + "requires": { + "array-back": "^2.0.0", + "find-replace": "^1.0.3", + "typical": "^2.6.1" + } + }, + "commander": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/commander/-/commander-3.0.2.tgz", + "integrity": "sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "convert-source-map": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", + "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", + "peer": true, + "requires": { + "safe-buffer": "~5.1.1" + } + }, + "cookie": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.1.tgz", + "integrity": "sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==", + "dev": true + }, + "core-js-compat": { + "version": "3.20.3", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.20.3.tgz", + "integrity": "sha512-c8M5h0IkNZ+I92QhIpuSijOxGAcj3lgpsWdkCqmUTZNwidujF4r3pi6x1DCN+Vcs5qTS2XWWMfWSuCqyupX8gw==", + "requires": { + "browserslist": "^4.19.1", + "semver": "7.0.0" + }, + "dependencies": { + "semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==" + } + } + }, + "core-js-pure": { + "version": "3.20.3", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.20.3.tgz", + "integrity": "sha512-Q2H6tQ5MtPtcC7f3HxJ48i4Q7T9ybPKgvWyuH7JXIoNa2pm0KuBnycsET/qw1SLLZYfbsbrZQNMeIOClb+6WIA==", + "dev": true + }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "cosmiconfig": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-5.2.1.tgz", + "integrity": "sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==", + "dev": true, + "requires": { + "import-fresh": "^2.0.0", + "is-directory": "^0.3.1", + "js-yaml": "^3.13.1", + "parse-json": "^4.0.0" + }, + "dependencies": { + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + } + } + }, + "crc-32": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.0.tgz", + "integrity": "sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==", + "dev": true, + "requires": { + "exit-on-epipe": "~1.0.1", + "printj": "~1.1.0" + } + }, + "create-hash": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", + "requires": { + "cipher-base": "^1.0.1", + 
"inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "create-hmac": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "requires": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "cross-fetch": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-2.2.5.tgz", + "integrity": "sha512-xqYAhQb4NhCJSRym03dwxpP1bYXpK3y7UN83Bo2WFi3x1Zmzn0SL/6xGoPr+gpt4WmNrgCCX3HPysvOwFOW36w==", + "requires": { + "node-fetch": "2.6.1", + "whatwg-fetch": "2.0.4" + }, + "dependencies": { + "node-fetch": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" + } + } + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "requires": { + "assert-plus": "^1.0.0" + } + }, + "debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "requires": { + "ms": "2.1.2" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": true, + "requires": { + "type-detect": "^4.0.0" + } + }, + "deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "deferred-leveldown": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz", + "integrity": "sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw==", + "dev": true, + "requires": { + "abstract-leveldown": "~6.2.1", + "inherits": "^2.0.3" + }, + "dependencies": { + "abstract-leveldown": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", + "integrity": 
"sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", + "dev": true, + "requires": { + "buffer": "^5.5.0", + "immediate": "^3.2.3", + "level-concat-iterator": "~2.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + } + } + } + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "requires": { + "object-keys": "^1.0.12" + } + }, + "delay": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", + "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==", + "dev": true + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + }, + "depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=", + "dev": true + }, + "diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, + "dom-walk": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", + "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" + }, + "ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "electron-to-chromium": { + "version": "1.4.48", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.48.tgz", + "integrity": "sha512-RT3SEmpv7XUA+tKXrZGudAWLDpa7f8qmhjcLaM6OD/ERxjQ/zAojT8/Vvo0BSzbArkElFZ1WyZ9FuwAYbkdBNA==" + }, + "elliptic": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "requires": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "requires": { + "iconv-lite": "^0.6.2" + }, + "dependencies": { + "iconv-lite": { + "version": "0.6.3", + "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } + } + } + }, + "encoding-down": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-6.3.0.tgz", + "integrity": "sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw==", + "dev": true, + "requires": { + "abstract-leveldown": "^6.2.1", + "inherits": "^2.0.3", + "level-codec": "^9.0.0", + "level-errors": "^2.0.0" + } + }, + "enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "requires": { + "ansi-colors": "^4.1.1" + } + }, + "env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true + }, + "errno": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "requires": { + "prr": "~1.0.1" + } + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "es-abstract": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", + "integrity": "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "get-intrinsic": "^1.1.1", + "get-symbol-description": "^1.0.0", + "has": "^1.0.3", + "has-symbols": "^1.0.2", + "internal-slot": "^1.0.3", + "is-callable": "^1.2.4", + "is-negative-zero": "^2.0.1", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.1", + "is-string": "^1.0.7", + "is-weakref": "^1.0.1", + "object-inspect": "^1.11.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.2", + "string.prototype.trimend": "^1.0.4", + "string.prototype.trimstart": "^1.0.4", + "unbox-primitive": "^1.0.1" + }, + "dependencies": { + "object.assign": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", + "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + } + } + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": 
"sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + }, + "eslint": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.9.1", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^4.0.3", + "eslint-utils": "^1.3.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^5.0.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob": "^7.1.2", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.11", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0" + }, + "dependencies": { + "import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true + }, + "espree": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "dev": true, + "requires": { + 
"acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dev": true, + "requires": { + "estraverse": "^5.1.0" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "requires": { + "estraverse": "^5.2.0" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true + }, + "eth-block-tracker": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/eth-block-tracker/-/eth-block-tracker-4.4.3.tgz", + "integrity": "sha512-A8tG4Z4iNg4mw5tP1Vung9N9IjgMNqpiMoJ/FouSFwNCGHv2X0mmOYwtQOJzki6XN7r7Tyo01S29p7b224I4jw==", + "requires": { + "@babel/plugin-transform-runtime": "^7.5.5", + "@babel/runtime": "^7.5.5", + "eth-query": "^2.1.0", + "json-rpc-random-id": "^1.0.1", + "pify": "^3.0.0", + "safe-event-emitter": "^1.0.1" + } + }, + "eth-ens-namehash": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/eth-ens-namehash/-/eth-ens-namehash-2.0.8.tgz", + "integrity": "sha1-IprEbsqG1S4MmR58sq74P/D2i88=", + "dev": true, + "requires": { + "idna-uts46-hx": "^2.3.1", + "js-sha3": "^0.5.7" + }, + "dependencies": { + "js-sha3": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.5.7.tgz", + "integrity": "sha1-DU/9gALVMzqrr0oj7tL2N0yfKOc=", + "dev": true + } + } + }, + "eth-json-rpc-errors": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eth-json-rpc-errors/-/eth-json-rpc-errors-2.0.2.tgz", + "integrity": "sha512-uBCRM2w2ewusRHGxN8JhcuOb2RN3ueAOYH/0BhqdFmQkZx5lj5+fLKTz0mIVOzd4FG5/kUksCzCD7eTEim6gaA==", + "requires": { + "fast-safe-stringify": "^2.0.6" + } + }, + "eth-query": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/eth-query/-/eth-query-2.1.2.tgz", + "integrity": "sha1-1nQdkAAQa1FRDHLbktY2VFam2l4=", + "requires": { + "json-rpc-random-id": "^1.0.0", + "xtend": "^4.0.1" + } + }, + "eth-rpc-errors": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/eth-rpc-errors/-/eth-rpc-errors-3.0.0.tgz", + "integrity": "sha512-iPPNHPrLwUlR9xCSYm7HHQjWBasor3+KZfRvwEWxMz3ca0yqnlBeJrnyphkGIXZ4J7AMAaOLmwy4AWhnxOiLxg==", + "requires": { + "fast-safe-stringify": "^2.0.6" + } + }, + "eth-sig-util": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/eth-sig-util/-/eth-sig-util-2.5.4.tgz", + "integrity": "sha512-aCMBwp8q/4wrW4QLsF/HYBOSA7TpLKmkVwP3pYQNkEEseW2Rr8Z5Uxc9/h6HX+OG3tuHo+2bINVSihIeBfym6A==", + "dev": true, + "requires": { + "ethereumjs-abi": "0.6.8", + "ethereumjs-util": "^5.1.1", + "tweetnacl": "^1.0.3", + "tweetnacl-util": "^0.15.0" + }, + "dependencies": { + "ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "ethereum-bloom-filters": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/ethereum-bloom-filters/-/ethereum-bloom-filters-1.0.10.tgz", + "integrity": "sha512-rxJ5OFN3RwjQxDcFP2Z5+Q9ho4eIdEmSc2ht0fCu8Se9nbXjZ7/031uXoUYJ87KHCOdVeiUuwSnoS7hmYAGVHA==", + "dev": true, + "requires": { + "js-sha3": "^0.8.0" + } + }, + "ethereum-common": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/ethereum-common/-/ethereum-common-0.2.0.tgz", + "integrity": "sha512-XOnAR/3rntJgbCdGhqdaLIxDLWKLmsZOGhHdBKadEr6gEnJLH52k93Ou+TUdFaPN3hJc3isBZBal3U/XZ15abA==" + }, + "ethereum-cryptography": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/ethereum-cryptography/-/ethereum-cryptography-0.1.3.tgz", + "integrity": "sha512-w8/4x1SGGzc+tO97TASLja6SLd3fRIK2tLVcV2Gx4IB21hE19atll5Cq9o3d0ZmAYC/8aw0ipieTSiekAea4SQ==", + "requires": { + "@types/pbkdf2": "^3.0.0", + "@types/secp256k1": "^4.0.1", + "blakejs": "^1.1.0", + "browserify-aes": "^1.2.0", + "bs58check": "^2.1.2", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "hash.js": "^1.1.7", + "keccak": "^3.0.0", + "pbkdf2": "^3.0.17", + "randombytes": "^2.1.0", + "safe-buffer": "^5.1.2", + "scrypt-js": "^3.0.0", + "secp256k1": "^4.0.1", + "setimmediate": "^1.0.5" + } + }, + "ethereum-protocol": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ethereum-protocol/-/ethereum-protocol-1.0.1.tgz", + "integrity": "sha512-3KLX1mHuEsBW0dKG+c6EOJS1NBNqdCICvZW9sInmZTt5aY0oxmHVggYRE0lJu1tcnMD1K+AKHdLi6U43Awm1Vg==" + }, + "ethereum-waffle": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/ethereum-waffle/-/ethereum-waffle-3.4.0.tgz", + "integrity": "sha512-ADBqZCkoSA5Isk486ntKJVjFEawIiC+3HxNqpJqONvh3YXBTNiRfXvJtGuAFLXPG91QaqkGqILEHANAo7j/olQ==", + "dev": true, + "requires": { + "@ethereum-waffle/chai": "^3.4.0", + "@ethereum-waffle/compiler": "^3.4.0", + "@ethereum-waffle/mock-contract": "^3.3.0", + "@ethereum-waffle/provider": "^3.4.0", + "ethers": "^5.0.1" + } + }, + "ethereumjs-abi": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/ethereumjs-abi/-/ethereumjs-abi-0.6.8.tgz", + "integrity": "sha512-Tx0r/iXI6r+lRsdvkFDlut0N08jWMnKRZ6Gkq+Nmw75lZe4e6o3EkSnkaBP5NF6+m5PTGAr9JP43N3LyeoglsA==", + "requires": { + "bn.js": "^4.11.8", + "ethereumjs-util": "^6.0.0" + } + }, + "ethereumjs-account": { + "version": "2.0.5", + "resolved": 
"https://registry.npmjs.org/ethereumjs-account/-/ethereumjs-account-2.0.5.tgz", + "integrity": "sha512-bgDojnXGjhMwo6eXQC0bY6UK2liSFUSMwwylOmQvZbSl/D7NXQ3+vrGO46ZeOgjGfxXmgIeVNDIiHw7fNZM4VA==", + "requires": { + "ethereumjs-util": "^5.0.0", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + }, + "dependencies": { + "ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "ethereumjs-block": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/ethereumjs-block/-/ethereumjs-block-1.7.1.tgz", + "integrity": "sha512-B+sSdtqm78fmKkBq78/QLKJbu/4Ts4P2KFISdgcuZUPDm9x+N7qgBPIIFUGbaakQh8bzuquiRVbdmvPKqbILRg==", + "requires": { + "async": "^2.0.1", + "ethereum-common": "0.2.0", + "ethereumjs-tx": "^1.2.2", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "ethereumjs-tx": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-1.3.7.tgz", + "integrity": "sha512-wvLMxzt1RPhAQ9Yi3/HKZTn0FZYpnsmQdbKYfUUpi4j1SEIcbkd9tndVjcPrufY3V7j2IebOpC00Zp2P/Ay2kA==", + "requires": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + }, + "dependencies": { + "ethereum-common": { + "version": "0.0.18", + "resolved": "https://registry.npmjs.org/ethereum-common/-/ethereum-common-0.0.18.tgz", + "integrity": "sha1-L9w1dvIykDNYl26znaeDIT/5Uj8=" + } + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "ethereumjs-common": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/ethereumjs-common/-/ethereumjs-common-1.5.2.tgz", + "integrity": "sha512-hTfZjwGX52GS2jcVO6E2sx4YuFnf0Fhp5ylo4pEPhEffNln7vS59Hr5sLnp3/QCazFLluuBZ+FZ6J5HTp0EqCA==" + }, + "ethereumjs-tx": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ethereumjs-tx/-/ethereumjs-tx-2.1.2.tgz", + "integrity": "sha512-zZEK1onCeiORb0wyCXUvg94Ve5It/K6GD1K+26KfFKodiBiS6d9lfCXlUKGBBdQ+bv7Day+JK0tj1K+BeNFRAw==", + "requires": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "ethereumjs-util": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-6.2.1.tgz", + "integrity": "sha512-W2Ktez4L01Vexijrm5EB6w7dg4n/TgpoYU4avuT5T3Vmnw/eCRtiBrJfQYS/DCSvDIOLn2k57GcHdeBcgVxAqw==", + "requires": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + }, + "dependencies": { + "@types/bn.js": { + "version": "4.11.6", + "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-4.11.6.tgz", + "integrity": "sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg==", + "requires": { + "@types/node": "*" + } + } + } + 
}, + "ethereumjs-vm": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/ethereumjs-vm/-/ethereumjs-vm-2.6.0.tgz", + "integrity": "sha512-r/XIUik/ynGbxS3y+mvGnbOKnuLo40V5Mj1J25+HEO63aWYREIqvWeRO/hnROlMBE5WoniQmPmhiaN0ctiHaXw==", + "requires": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "ethereumjs-account": "^2.0.3", + "ethereumjs-block": "~2.2.0", + "ethereumjs-common": "^1.1.0", + "ethereumjs-util": "^6.0.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1" + }, + "dependencies": { + "ethereumjs-block": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/ethereumjs-block/-/ethereumjs-block-2.2.2.tgz", + "integrity": "sha512-2p49ifhek3h2zeg/+da6XpdFR3GlqY3BIEiqxGF8j9aSRIgkb7M1Ky+yULBKJOu8PAZxfhsYA+HxUk2aCQp3vg==", + "requires": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + } + } + }, + "ethereumjs-wallet": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/ethereumjs-wallet/-/ethereumjs-wallet-1.0.2.tgz", + "integrity": "sha512-CCWV4RESJgRdHIvFciVQFnCHfqyhXWchTPlkfp28Qc53ufs+doi5I/cV2+xeK9+qEo25XCWfP9MiL+WEPAZfdA==", + "requires": { + "aes-js": "^3.1.2", + "bs58check": "^2.1.2", + "ethereum-cryptography": "^0.1.3", + "ethereumjs-util": "^7.1.2", + "randombytes": "^2.1.0", + "scrypt-js": "^3.0.1", + "utf8": "^3.0.0", + "uuid": "^8.3.2" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==" + }, + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + } + } + } + }, + "ethers": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-5.5.3.tgz", + "integrity": "sha512-fTT4WT8/hTe/BLwRUtl7I5zlpF3XC3P/Xwqxc5AIP2HGlH15qpmjs0Ou78az93b1rLITzXLFxoNX63B8ZbUd7g==", + "dev": true, + "requires": { + "@ethersproject/abi": "5.5.0", + "@ethersproject/abstract-provider": "5.5.1", + "@ethersproject/abstract-signer": "5.5.0", + "@ethersproject/address": "5.5.0", + "@ethersproject/base64": "5.5.0", + "@ethersproject/basex": "5.5.0", + "@ethersproject/bignumber": "5.5.0", + "@ethersproject/bytes": "5.5.0", + "@ethersproject/constants": "5.5.0", + "@ethersproject/contracts": "5.5.0", + "@ethersproject/hash": "5.5.0", + "@ethersproject/hdnode": "5.5.0", + "@ethersproject/json-wallets": "5.5.0", + "@ethersproject/keccak256": "5.5.0", + "@ethersproject/logger": "5.5.0", + "@ethersproject/networks": "5.5.2", + 
"@ethersproject/pbkdf2": "5.5.0", + "@ethersproject/properties": "5.5.0", + "@ethersproject/providers": "5.5.2", + "@ethersproject/random": "5.5.1", + "@ethersproject/rlp": "5.5.0", + "@ethersproject/sha2": "5.5.0", + "@ethersproject/signing-key": "5.5.0", + "@ethersproject/solidity": "5.5.0", + "@ethersproject/strings": "5.5.0", + "@ethersproject/transactions": "5.5.0", + "@ethersproject/units": "5.5.0", + "@ethersproject/wallet": "5.5.0", + "@ethersproject/web": "5.5.1", + "@ethersproject/wordlists": "5.5.0" + } + }, + "ethjs-unit": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/ethjs-unit/-/ethjs-unit-0.1.6.tgz", + "integrity": "sha1-xmWSHkduh7ziqdWIpv4EBbLEFpk=", + "dev": true, + "requires": { + "bn.js": "4.11.6", + "number-to-bn": "1.7.0" + }, + "dependencies": { + "bn.js": { + "version": "4.11.6", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.6.tgz", + "integrity": "sha1-UzRK2xRhehP26N0s4okF0cC6MhU=", + "dev": true + } + } + }, + "ethjs-util": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/ethjs-util/-/ethjs-util-0.1.6.tgz", + "integrity": "sha512-CUnVOQq7gSpDHZVVrQW8ExxUETWrnrvXYvYz55wOU8Uj4VCgw56XC2B/fVqQN+f7gmrnRHSLVnFAwsCuNwji8w==", + "requires": { + "is-hex-prefixed": "1.0.0", + "strip-hex-prefix": "1.0.0" + } + }, + "event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "dev": true + }, + "events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" + }, + "evp_bytestokey": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", + "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", + "requires": { + "md5.js": "^1.3.4", + "safe-buffer": "^5.1.1" + } + }, + "exit-on-epipe": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz", + "integrity": "sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==", + "dev": true + }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "requires": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + } + }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + }, + "fake-merkle-patricia-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/fake-merkle-patricia-tree/-/fake-merkle-patricia-tree-1.0.1.tgz", + "integrity": "sha1-S4w6z7Ugr635hgsfFM2M40As3dM=", + "requires": { + "checkpoint-store": "^1.1.0" + } + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": 
"sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, + "fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "fetch-ponyfill": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/fetch-ponyfill/-/fetch-ponyfill-4.1.0.tgz", + "integrity": "sha1-rjzl9zLGReq4fkroeTQUcJsjmJM=", + "requires": { + "node-fetch": "~1.7.1" + }, + "dependencies": { + "node-fetch": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz", + "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==", + "requires": { + "encoding": "^0.1.11", + "is-stream": "^1.0.1" + } + } + } + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dev": true, + "requires": { + "flat-cache": "^2.0.1" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-replace": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-1.0.3.tgz", + "integrity": "sha1-uI5zZNLZyVlVnziMZmcNYTBEH6A=", + "dev": true, + "requires": { + "array-back": "^1.0.4", + "test-value": "^2.1.0" + }, + "dependencies": { + "array-back": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", + "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", + "dev": true, + "requires": { + "typical": "^2.6.0" + } + } + } + }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "find-yarn-workspace-root": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz", + "integrity": "sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ==", + "dev": 
true, + "requires": { + "micromatch": "^4.0.2" + } + }, + "flat": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.1.tgz", + "integrity": "sha512-FmTtBsHskrU6FJ2VxCnsDb84wu9zhmO3cUX2kGFb5tuwhfXxGciiT0oRY+cck35QmG+NmGh5eLz6lLCpWTqwpA==", + "dev": true, + "requires": { + "is-buffer": "~2.0.3" + } + }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dev": true, + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + }, + "dependencies": { + "rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + } + } + }, + "flatted": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", + "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", + "dev": true + }, + "follow-redirects": { + "version": "1.14.7", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.7.tgz", + "integrity": "sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ==", + "dev": true + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + }, + "form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "fp-ts": { + "version": "1.19.3", + "resolved": "https://registry.npmjs.org/fp-ts/-/fp-ts-1.19.3.tgz", + "integrity": "sha512-H5KQDspykdHuztLTg+ajGN0Z2qUjcEf3Ybxc6hLt0k7/zPkn29XnKnxlBPyW2XIddWrGaJBzBl4VLYOtk39yZg==", + "dev": true + }, + "fs-extra": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=" + }, + "ganache-core": { + "version": 
"2.13.2", + "resolved": "https://registry.npmjs.org/ganache-core/-/ganache-core-2.13.2.tgz", + "integrity": "sha512-tIF5cR+ANQz0+3pHWxHjIwHqFXcVo0Mb+kcsNhglNFALcYo49aQpnS9dqHartqPfMFjiHh/qFoD3mYK0d/qGgw==", + "dev": true, + "requires": { + "abstract-leveldown": "3.0.0", + "async": "2.6.2", + "bip39": "2.5.0", + "cachedown": "1.0.0", + "clone": "2.1.2", + "debug": "3.2.6", + "encoding-down": "5.0.4", + "eth-sig-util": "3.0.0", + "ethereumjs-abi": "0.6.8", + "ethereumjs-account": "3.0.0", + "ethereumjs-block": "2.2.2", + "ethereumjs-common": "1.5.0", + "ethereumjs-tx": "2.1.2", + "ethereumjs-util": "6.2.1", + "ethereumjs-vm": "4.2.0", + "ethereumjs-wallet": "0.6.5", + "heap": "0.2.6", + "keccak": "3.0.1", + "level-sublevel": "6.6.4", + "levelup": "3.1.1", + "lodash": "4.17.20", + "lru-cache": "5.1.1", + "merkle-patricia-tree": "3.0.0", + "patch-package": "6.2.2", + "seedrandom": "3.0.1", + "source-map-support": "0.5.12", + "tmp": "0.1.0", + "web3": "1.2.11", + "web3-provider-engine": "14.2.1", + "websocket": "1.0.32" + }, + "dependencies": { + "@ethersproject/abi": { + "version": "5.0.0-beta.153", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/address": ">=5.0.0-beta.128", + "@ethersproject/bignumber": ">=5.0.0-beta.130", + "@ethersproject/bytes": ">=5.0.0-beta.129", + "@ethersproject/constants": ">=5.0.0-beta.128", + "@ethersproject/hash": ">=5.0.0-beta.128", + "@ethersproject/keccak256": ">=5.0.0-beta.127", + "@ethersproject/logger": ">=5.0.0-beta.129", + "@ethersproject/properties": ">=5.0.0-beta.131", + "@ethersproject/strings": ">=5.0.0-beta.130" + } + }, + "@ethersproject/abstract-provider": { + "version": "5.0.8", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/networks": "^5.0.7", + "@ethersproject/properties": "^5.0.7", + "@ethersproject/transactions": "^5.0.9", + "@ethersproject/web": "^5.0.12" + } + }, + "@ethersproject/abstract-signer": { + "version": "5.0.10", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/abstract-provider": "^5.0.8", + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7" + } + }, + "@ethersproject/address": { + "version": "5.0.9", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/keccak256": "^5.0.7", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/rlp": "^5.0.7" + } + }, + "@ethersproject/base64": { + "version": "5.0.7", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bytes": "^5.0.9" + } + }, + "@ethersproject/bignumber": { + "version": "5.0.13", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "bn.js": "^4.4.0" + } + }, + "@ethersproject/bytes": { + "version": "5.0.9", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/logger": "^5.0.8" + } + }, + "@ethersproject/constants": { + "version": "5.0.8", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bignumber": "^5.0.13" + } + }, + "@ethersproject/hash": { + "version": "5.0.10", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/abstract-signer": "^5.0.10", + "@ethersproject/address": "^5.0.9", + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + 
"@ethersproject/keccak256": "^5.0.7", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7", + "@ethersproject/strings": "^5.0.8" + } + }, + "@ethersproject/keccak256": { + "version": "5.0.7", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bytes": "^5.0.9", + "js-sha3": "0.5.7" + } + }, + "@ethersproject/logger": { + "version": "5.0.8", + "dev": true, + "optional": true + }, + "@ethersproject/networks": { + "version": "5.0.7", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/logger": "^5.0.8" + } + }, + "@ethersproject/properties": { + "version": "5.0.7", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/logger": "^5.0.8" + } + }, + "@ethersproject/rlp": { + "version": "5.0.7", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8" + } + }, + "@ethersproject/signing-key": { + "version": "5.0.8", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7", + "elliptic": "6.5.3" + } + }, + "@ethersproject/strings": { + "version": "5.0.8", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/constants": "^5.0.8", + "@ethersproject/logger": "^5.0.8" + } + }, + "@ethersproject/transactions": { + "version": "5.0.9", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/address": "^5.0.9", + "@ethersproject/bignumber": "^5.0.13", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/constants": "^5.0.8", + "@ethersproject/keccak256": "^5.0.7", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7", + "@ethersproject/rlp": "^5.0.7", + "@ethersproject/signing-key": "^5.0.8" + } + }, + "@ethersproject/web": { + "version": "5.0.12", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/base64": "^5.0.7", + "@ethersproject/bytes": "^5.0.9", + "@ethersproject/logger": "^5.0.8", + "@ethersproject/properties": "^5.0.7", + "@ethersproject/strings": "^5.0.8" + } + }, + "@sindresorhus/is": { + "version": "0.14.0", + "dev": true, + "optional": true + }, + "@szmarczak/http-timer": { + "version": "1.1.2", + "dev": true, + "optional": true, + "requires": { + "defer-to-connect": "^1.0.1" + } + }, + "@types/bn.js": { + "version": "4.11.6", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/node": { + "version": "14.14.20", + "dev": true + }, + "@types/pbkdf2": { + "version": "3.1.0", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/secp256k1": { + "version": "4.0.1", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@yarnpkg/lockfile": { + "version": "1.1.0", + "dev": true + }, + "abstract-leveldown": { + "version": "3.0.0", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + }, + "accepts": { + "version": "1.3.7", + "dev": true, + "optional": true, + "requires": { + "mime-types": "~2.1.24", + "negotiator": "0.6.2" + } + }, + "aes-js": { + "version": "3.1.2", + "dev": true, + "optional": true + }, + "ajv": { + "version": "6.12.6", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-styles": { + "version": "3.2.1", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "arr-diff": { + "version": "4.0.0", + "dev": true + }, + "arr-flatten": { + "version": "1.1.0", 
+ "dev": true + }, + "arr-union": { + "version": "3.1.0", + "dev": true + }, + "array-flatten": { + "version": "1.1.1", + "dev": true, + "optional": true + }, + "array-unique": { + "version": "0.3.2", + "dev": true + }, + "asn1": { + "version": "0.2.4", + "dev": true, + "requires": { + "safer-buffer": "~2.1.0" + } + }, + "asn1.js": { + "version": "5.4.1", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "safer-buffer": "^2.1.0" + } + }, + "assert-plus": { + "version": "1.0.0", + "dev": true + }, + "assign-symbols": { + "version": "1.0.0", + "dev": true + }, + "async": { + "version": "2.6.2", + "dev": true, + "requires": { + "lodash": "^4.17.11" + } + }, + "async-eventemitter": { + "version": "0.2.4", + "dev": true, + "requires": { + "async": "^2.4.0" + } + }, + "async-limiter": { + "version": "1.0.1", + "dev": true + }, + "asynckit": { + "version": "0.4.0", + "dev": true + }, + "atob": { + "version": "2.1.2", + "dev": true + }, + "aws-sign2": { + "version": "0.7.0", + "dev": true + }, + "aws4": { + "version": "1.11.0", + "dev": true + }, + "babel-code-frame": { + "version": "6.26.0", + "dev": true, + "requires": { + "chalk": "^1.1.3", + "esutils": "^2.0.2", + "js-tokens": "^3.0.2" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "dev": true + }, + "ansi-styles": { + "version": "2.2.1", + "dev": true + }, + "chalk": { + "version": "1.1.3", + "dev": true, + "requires": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + } + }, + "js-tokens": { + "version": "3.0.2", + "dev": true + }, + "strip-ansi": { + "version": "3.0.1", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "supports-color": { + "version": "2.0.0", + "dev": true + } + } + }, + "babel-core": { + "version": "6.26.3", + "dev": true, + "requires": { + "babel-code-frame": "^6.26.0", + "babel-generator": "^6.26.0", + "babel-helpers": "^6.24.1", + "babel-messages": "^6.23.0", + "babel-register": "^6.26.0", + "babel-runtime": "^6.26.0", + "babel-template": "^6.26.0", + "babel-traverse": "^6.26.0", + "babel-types": "^6.26.0", + "babylon": "^6.18.0", + "convert-source-map": "^1.5.1", + "debug": "^2.6.9", + "json5": "^0.5.1", + "lodash": "^4.17.4", + "minimatch": "^3.0.4", + "path-is-absolute": "^1.0.1", + "private": "^0.1.8", + "slash": "^1.0.0", + "source-map": "^0.5.7" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "json5": { + "version": "0.5.1", + "dev": true + }, + "ms": { + "version": "2.0.0", + "dev": true + }, + "slash": { + "version": "1.0.0", + "dev": true + } + } + }, + "babel-generator": { + "version": "6.26.1", + "dev": true, + "requires": { + "babel-messages": "^6.23.0", + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "detect-indent": "^4.0.0", + "jsesc": "^1.3.0", + "lodash": "^4.17.4", + "source-map": "^0.5.7", + "trim-right": "^1.0.1" + }, + "dependencies": { + "jsesc": { + "version": "1.3.0", + "dev": true + } + } + }, + "babel-helper-builder-binary-assignment-operator-visitor": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-explode-assignable-expression": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-helper-call-delegate": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-hoist-variables": "^6.24.1", + "babel-runtime": "^6.22.0", + 
"babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "babel-helper-define-map": { + "version": "6.26.0", + "dev": true, + "requires": { + "babel-helper-function-name": "^6.24.1", + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "lodash": "^4.17.4" + } + }, + "babel-helper-explode-assignable-expression": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "babel-helper-function-name": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-get-function-arity": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "babel-helper-get-function-arity": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-helper-hoist-variables": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-helper-optimise-call-expression": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-helper-regex": { + "version": "6.26.0", + "dev": true, + "requires": { + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "lodash": "^4.17.4" + } + }, + "babel-helper-remap-async-to-generator": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-function-name": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "babel-helper-replace-supers": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-optimise-call-expression": "^6.24.1", + "babel-messages": "^6.23.0", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "babel-helpers": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "babel-messages": { + "version": "6.23.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-check-es2015-constants": { + "version": "6.22.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-syntax-async-functions": { + "version": "6.13.0", + "dev": true + }, + "babel-plugin-syntax-exponentiation-operator": { + "version": "6.13.0", + "dev": true + }, + "babel-plugin-syntax-trailing-function-commas": { + "version": "6.22.0", + "dev": true + }, + "babel-plugin-transform-async-to-generator": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-remap-async-to-generator": "^6.24.1", + "babel-plugin-syntax-async-functions": "^6.8.0", + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-arrow-functions": { + "version": "6.22.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-block-scoped-functions": { + "version": "6.22.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-block-scoping": { + "version": "6.26.0", + "dev": true, + "requires": { + "babel-runtime": "^6.26.0", + "babel-template": "^6.26.0", + "babel-traverse": "^6.26.0", + "babel-types": "^6.26.0", + "lodash": "^4.17.4" + } + }, + "babel-plugin-transform-es2015-classes": { + "version": "6.24.1", + "dev": true, + "requires": { + 
"babel-helper-define-map": "^6.24.1", + "babel-helper-function-name": "^6.24.1", + "babel-helper-optimise-call-expression": "^6.24.1", + "babel-helper-replace-supers": "^6.24.1", + "babel-messages": "^6.23.0", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-computed-properties": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-destructuring": { + "version": "6.23.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-duplicate-keys": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-for-of": { + "version": "6.23.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-function-name": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-function-name": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-literals": { + "version": "6.22.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-modules-amd": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-modules-commonjs": { + "version": "6.26.2", + "dev": true, + "requires": { + "babel-plugin-transform-strict-mode": "^6.24.1", + "babel-runtime": "^6.26.0", + "babel-template": "^6.26.0", + "babel-types": "^6.26.0" + } + }, + "babel-plugin-transform-es2015-modules-systemjs": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-hoist-variables": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-modules-umd": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-plugin-transform-es2015-modules-amd": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-object-super": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-replace-supers": "^6.24.1", + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-parameters": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-call-delegate": "^6.24.1", + "babel-helper-get-function-arity": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-shorthand-properties": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-spread": { + "version": "6.22.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-sticky-regex": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-regex": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-plugin-transform-es2015-template-literals": { + "version": "6.22.0", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-typeof-symbol": { + "version": "6.23.0", + "dev": true, + "requires": { + 
"babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-es2015-unicode-regex": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-regex": "^6.24.1", + "babel-runtime": "^6.22.0", + "regexpu-core": "^2.0.0" + } + }, + "babel-plugin-transform-exponentiation-operator": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-helper-builder-binary-assignment-operator-visitor": "^6.24.1", + "babel-plugin-syntax-exponentiation-operator": "^6.8.0", + "babel-runtime": "^6.22.0" + } + }, + "babel-plugin-transform-regenerator": { + "version": "6.26.0", + "dev": true, + "requires": { + "regenerator-transform": "^0.10.0" + } + }, + "babel-plugin-transform-strict-mode": { + "version": "6.24.1", + "dev": true, + "requires": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "babel-preset-env": { + "version": "1.7.0", + "dev": true, + "requires": { + "babel-plugin-check-es2015-constants": "^6.22.0", + "babel-plugin-syntax-trailing-function-commas": "^6.22.0", + "babel-plugin-transform-async-to-generator": "^6.22.0", + "babel-plugin-transform-es2015-arrow-functions": "^6.22.0", + "babel-plugin-transform-es2015-block-scoped-functions": "^6.22.0", + "babel-plugin-transform-es2015-block-scoping": "^6.23.0", + "babel-plugin-transform-es2015-classes": "^6.23.0", + "babel-plugin-transform-es2015-computed-properties": "^6.22.0", + "babel-plugin-transform-es2015-destructuring": "^6.23.0", + "babel-plugin-transform-es2015-duplicate-keys": "^6.22.0", + "babel-plugin-transform-es2015-for-of": "^6.23.0", + "babel-plugin-transform-es2015-function-name": "^6.22.0", + "babel-plugin-transform-es2015-literals": "^6.22.0", + "babel-plugin-transform-es2015-modules-amd": "^6.22.0", + "babel-plugin-transform-es2015-modules-commonjs": "^6.23.0", + "babel-plugin-transform-es2015-modules-systemjs": "^6.23.0", + "babel-plugin-transform-es2015-modules-umd": "^6.23.0", + "babel-plugin-transform-es2015-object-super": "^6.22.0", + "babel-plugin-transform-es2015-parameters": "^6.23.0", + "babel-plugin-transform-es2015-shorthand-properties": "^6.22.0", + "babel-plugin-transform-es2015-spread": "^6.22.0", + "babel-plugin-transform-es2015-sticky-regex": "^6.22.0", + "babel-plugin-transform-es2015-template-literals": "^6.22.0", + "babel-plugin-transform-es2015-typeof-symbol": "^6.23.0", + "babel-plugin-transform-es2015-unicode-regex": "^6.22.0", + "babel-plugin-transform-exponentiation-operator": "^6.22.0", + "babel-plugin-transform-regenerator": "^6.22.0", + "browserslist": "^3.2.6", + "invariant": "^2.2.2", + "semver": "^5.3.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "dev": true + } + } + }, + "babel-register": { + "version": "6.26.0", + "dev": true, + "requires": { + "babel-core": "^6.26.0", + "babel-runtime": "^6.26.0", + "core-js": "^2.5.0", + "home-or-tmp": "^2.0.0", + "lodash": "^4.17.4", + "mkdirp": "^0.5.1", + "source-map-support": "^0.4.15" + }, + "dependencies": { + "source-map-support": { + "version": "0.4.18", + "dev": true, + "requires": { + "source-map": "^0.5.6" + } + } + } + }, + "babel-runtime": { + "version": "6.26.0", + "dev": true, + "requires": { + "core-js": "^2.4.0", + "regenerator-runtime": "^0.11.0" + } + }, + "babel-template": { + "version": "6.26.0", + "dev": true, + "requires": { + "babel-runtime": "^6.26.0", + "babel-traverse": "^6.26.0", + "babel-types": "^6.26.0", + "babylon": "^6.18.0", + "lodash": "^4.17.4" + } + }, + "babel-traverse": { + "version": "6.26.0", + "dev": true, + "requires": { + "babel-code-frame": "^6.26.0", + 
"babel-messages": "^6.23.0", + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "babylon": "^6.18.0", + "debug": "^2.6.8", + "globals": "^9.18.0", + "invariant": "^2.2.2", + "lodash": "^4.17.4" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "globals": { + "version": "9.18.0", + "dev": true + }, + "ms": { + "version": "2.0.0", + "dev": true + } + } + }, + "babel-types": { + "version": "6.26.0", + "dev": true, + "requires": { + "babel-runtime": "^6.26.0", + "esutils": "^2.0.2", + "lodash": "^4.17.4", + "to-fast-properties": "^1.0.3" + }, + "dependencies": { + "to-fast-properties": { + "version": "1.0.3", + "dev": true + } + } + }, + "babelify": { + "version": "7.3.0", + "dev": true, + "requires": { + "babel-core": "^6.0.14", + "object-assign": "^4.0.0" + } + }, + "babylon": { + "version": "6.18.0", + "dev": true + }, + "backoff": { + "version": "2.5.0", + "dev": true, + "requires": { + "precond": "0.2" + } + }, + "balanced-match": { + "version": "1.0.0", + "dev": true + }, + "base": { + "version": "0.11.2", + "dev": true, + "requires": { + "cache-base": "^1.0.1", + "class-utils": "^0.3.5", + "component-emitter": "^1.2.1", + "define-property": "^1.0.0", + "isobject": "^3.0.1", + "mixin-deep": "^1.2.0", + "pascalcase": "^0.1.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "dev": true, + "requires": { + "is-descriptor": "^1.0.0" + } + } + } + }, + "base-x": { + "version": "3.0.8", + "dev": true, + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "base64-js": { + "version": "1.5.1", + "dev": true + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "dev": true, + "requires": { + "tweetnacl": "^0.14.3" + }, + "dependencies": { + "tweetnacl": { + "version": "0.14.5", + "dev": true + } + } + }, + "bignumber.js": { + "version": "9.0.1", + "dev": true, + "optional": true + }, + "bip39": { + "version": "2.5.0", + "dev": true, + "requires": { + "create-hash": "^1.1.0", + "pbkdf2": "^3.0.9", + "randombytes": "^2.0.1", + "safe-buffer": "^5.0.1", + "unorm": "^1.3.3" + } + }, + "blakejs": { + "version": "1.1.0", + "dev": true + }, + "bluebird": { + "version": "3.7.2", + "dev": true, + "optional": true + }, + "bn.js": { + "version": "4.11.9", + "dev": true + }, + "body-parser": { + "version": "1.19.0", + "dev": true, + "optional": true, + "requires": { + "bytes": "3.1.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "on-finished": "~2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "optional": true, + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "dev": true, + "optional": true + }, + "qs": { + "version": "6.7.0", + "dev": true, + "optional": true + } + } + }, + "brace-expansion": { + "version": "1.1.11", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "brorand": { + "version": "1.1.0", + "dev": true + }, + "browserify-aes": { + "version": "1.2.0", + "dev": true, + "requires": { + "buffer-xor": "^1.0.3", + "cipher-base": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.3", + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "browserify-cipher": { + "version": "1.0.1", + "dev": true, + "optional": true, + "requires": { + "browserify-aes": "^1.0.4", + "browserify-des": "^1.0.0", + "evp_bytestokey": "^1.0.0" + } + }, + "browserify-des": { 
+ "version": "1.0.2", + "dev": true, + "optional": true, + "requires": { + "cipher-base": "^1.0.1", + "des.js": "^1.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "browserify-rsa": { + "version": "4.1.0", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^5.0.0", + "randombytes": "^2.0.1" + }, + "dependencies": { + "bn.js": { + "version": "5.1.3", + "dev": true, + "optional": true + } + } + }, + "browserify-sign": { + "version": "4.2.1", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^5.1.1", + "browserify-rsa": "^4.0.1", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "elliptic": "^6.5.3", + "inherits": "^2.0.4", + "parse-asn1": "^5.1.5", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "dependencies": { + "bn.js": { + "version": "5.1.3", + "dev": true, + "optional": true + }, + "readable-stream": { + "version": "3.6.0", + "dev": true, + "optional": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "browserslist": { + "version": "3.2.8", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30000844", + "electron-to-chromium": "^1.3.47" + } + }, + "bs58": { + "version": "4.0.1", + "dev": true, + "requires": { + "base-x": "^3.0.2" + } + }, + "bs58check": { + "version": "2.1.2", + "dev": true, + "requires": { + "bs58": "^4.0.0", + "create-hash": "^1.1.0", + "safe-buffer": "^5.1.2" + } + }, + "buffer": { + "version": "5.7.1", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "buffer-from": { + "version": "1.1.1", + "dev": true + }, + "buffer-to-arraybuffer": { + "version": "0.0.5", + "dev": true, + "optional": true + }, + "buffer-xor": { + "version": "1.0.3", + "dev": true + }, + "bufferutil": { + "version": "4.0.3", + "dev": true, + "requires": { + "node-gyp-build": "^4.2.0" + } + }, + "bytes": { + "version": "3.1.0", + "dev": true, + "optional": true + }, + "bytewise": { + "version": "1.1.0", + "dev": true, + "requires": { + "bytewise-core": "^1.2.2", + "typewise": "^1.0.3" + } + }, + "bytewise-core": { + "version": "1.2.3", + "dev": true, + "requires": { + "typewise-core": "^1.2" + } + }, + "cache-base": { + "version": "1.0.1", + "dev": true, + "requires": { + "collection-visit": "^1.0.0", + "component-emitter": "^1.2.1", + "get-value": "^2.0.6", + "has-value": "^1.0.0", + "isobject": "^3.0.1", + "set-value": "^2.0.0", + "to-object-path": "^0.3.0", + "union-value": "^1.0.0", + "unset-value": "^1.0.0" + } + }, + "cacheable-request": { + "version": "6.1.0", + "dev": true, + "optional": true, + "requires": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^4.1.0", + "responselike": "^1.0.2" + }, + "dependencies": { + "lowercase-keys": { + "version": "2.0.0", + "dev": true, + "optional": true + } + } + }, + "cachedown": { + "version": "1.0.0", + "dev": true, + "requires": { + "abstract-leveldown": "^2.4.1", + "lru-cache": "^3.2.0" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + }, + "lru-cache": { + "version": "3.2.0", + "dev": true, + "requires": { + "pseudomap": "^1.0.1" + } + } + } + }, + "call-bind": { + "version": "1.0.2", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "caniuse-lite": { + "version": "1.0.30001174", + "dev": true + }, + "caseless": { + 
"version": "0.12.0", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "checkpoint-store": { + "version": "1.1.0", + "dev": true, + "requires": { + "functional-red-black-tree": "^1.0.1" + } + }, + "chownr": { + "version": "1.1.4", + "dev": true, + "optional": true + }, + "ci-info": { + "version": "2.0.0", + "dev": true + }, + "cids": { + "version": "0.7.5", + "dev": true, + "optional": true, + "requires": { + "buffer": "^5.5.0", + "class-is": "^1.1.0", + "multibase": "~0.6.0", + "multicodec": "^1.0.0", + "multihashes": "~0.4.15" + }, + "dependencies": { + "multicodec": { + "version": "1.0.4", + "dev": true, + "optional": true, + "requires": { + "buffer": "^5.6.0", + "varint": "^5.0.0" + } + } + } + }, + "cipher-base": { + "version": "1.0.4", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "class-is": { + "version": "1.1.0", + "dev": true, + "optional": true + }, + "class-utils": { + "version": "0.3.6", + "dev": true, + "requires": { + "arr-union": "^3.1.0", + "define-property": "^0.2.5", + "isobject": "^3.0.0", + "static-extend": "^0.1.1" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "dev": true, + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + } + }, + "kind-of": { + "version": "5.1.0", + "dev": true + } + } + }, + "clone": { + "version": "2.1.2", + "dev": true + }, + "clone-response": { + "version": "1.0.2", + "dev": true, + "optional": true, + "requires": { + "mimic-response": "^1.0.0" + } + }, + "collection-visit": { + "version": "1.0.0", + "dev": true, + "requires": { + "map-visit": "^1.0.0", + "object-visit": "^1.0.0" + } + }, + "color-convert": { + "version": "1.9.3", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "dev": true + }, + "combined-stream": { + "version": "1.0.8", + "dev": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "component-emitter": { + "version": "1.3.0", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "dev": true + }, + "concat-stream": { + "version": "1.6.2", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "content-disposition": { + "version": "0.5.3", + "dev": true, + "optional": true, + "requires": { + "safe-buffer": "5.1.2" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "dev": true, + "optional": true + } + } + }, + "content-hash": { + "version": "2.5.2", + "dev": true, + "optional": true, + "requires": { + "cids": "^0.7.1", + "multicodec": "^0.5.5", + "multihashes": "^0.4.15" + } + }, + "content-type": { + "version": "1.0.4", + "dev": true, + "optional": 
true + }, + "convert-source-map": { + "version": "1.7.0", + "dev": true, + "requires": { + "safe-buffer": "~5.1.1" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "dev": true + } + } + }, + "cookie": { + "version": "0.4.0", + "dev": true, + "optional": true + }, + "cookie-signature": { + "version": "1.0.6", + "dev": true, + "optional": true + }, + "cookiejar": { + "version": "2.1.2", + "dev": true, + "optional": true + }, + "copy-descriptor": { + "version": "0.1.1", + "dev": true + }, + "core-js": { + "version": "2.6.12", + "dev": true + }, + "core-js-pure": { + "version": "3.8.2", + "dev": true + }, + "core-util-is": { + "version": "1.0.2", + "dev": true + }, + "cors": { + "version": "2.8.5", + "dev": true, + "optional": true, + "requires": { + "object-assign": "^4", + "vary": "^1" + } + }, + "create-ecdh": { + "version": "4.0.4", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.1.0", + "elliptic": "^6.5.3" + } + }, + "create-hash": { + "version": "1.2.0", + "dev": true, + "requires": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "create-hmac": { + "version": "1.1.7", + "dev": true, + "requires": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "cross-fetch": { + "version": "2.2.3", + "dev": true, + "requires": { + "node-fetch": "2.1.2", + "whatwg-fetch": "2.0.4" + } + }, + "crypto-browserify": { + "version": "3.12.0", + "dev": true, + "optional": true, + "requires": { + "browserify-cipher": "^1.0.0", + "browserify-sign": "^4.0.0", + "create-ecdh": "^4.0.0", + "create-hash": "^1.1.0", + "create-hmac": "^1.1.0", + "diffie-hellman": "^5.0.0", + "inherits": "^2.0.1", + "pbkdf2": "^3.0.3", + "public-encrypt": "^4.0.0", + "randombytes": "^2.0.0", + "randomfill": "^1.0.3" + } + }, + "d": { + "version": "1.0.1", + "dev": true, + "requires": { + "es5-ext": "^0.10.50", + "type": "^1.0.1" + } + }, + "dashdash": { + "version": "1.14.1", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, + "debug": { + "version": "3.2.6", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "decode-uri-component": { + "version": "0.2.0", + "dev": true + }, + "decompress-response": { + "version": "3.3.0", + "dev": true, + "optional": true, + "requires": { + "mimic-response": "^1.0.0" + } + }, + "deep-equal": { + "version": "1.1.1", + "dev": true, + "requires": { + "is-arguments": "^1.0.4", + "is-date-object": "^1.0.1", + "is-regex": "^1.0.4", + "object-is": "^1.0.1", + "object-keys": "^1.1.1", + "regexp.prototype.flags": "^1.2.0" + } + }, + "defer-to-connect": { + "version": "1.1.3", + "dev": true, + "optional": true + }, + "deferred-leveldown": { + "version": "4.0.2", + "dev": true, + "requires": { + "abstract-leveldown": "~5.0.0", + "inherits": "^2.0.3" + }, + "dependencies": { + "abstract-leveldown": { + "version": "5.0.0", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + } + } + }, + "define-properties": { + "version": "1.1.3", + "dev": true, + "requires": { + "object-keys": "^1.0.12" + } + }, + "define-property": { + "version": "2.0.2", + "dev": true, + "requires": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + } + }, + "defined": { + "version": "1.0.0", + "dev": true + }, + "delayed-stream": { + "version": "1.0.0", + "dev": true + }, + "depd": { + "version": "1.1.2", + "dev": true, + "optional": true + }, + "des.js": { + "version": "1.0.1", 
+ "dev": true, + "optional": true, + "requires": { + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0" + } + }, + "destroy": { + "version": "1.0.4", + "dev": true, + "optional": true + }, + "detect-indent": { + "version": "4.0.0", + "dev": true, + "requires": { + "repeating": "^2.0.0" + } + }, + "diffie-hellman": { + "version": "5.0.3", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.1.0", + "miller-rabin": "^4.0.0", + "randombytes": "^2.0.0" + } + }, + "dom-walk": { + "version": "0.1.2", + "dev": true + }, + "dotignore": { + "version": "0.1.2", + "dev": true, + "requires": { + "minimatch": "^3.0.4" + } + }, + "duplexer3": { + "version": "0.1.4", + "dev": true, + "optional": true + }, + "ecc-jsbn": { + "version": "0.1.2", + "dev": true, + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "ee-first": { + "version": "1.1.1", + "dev": true, + "optional": true + }, + "electron-to-chromium": { + "version": "1.3.636", + "dev": true + }, + "elliptic": { + "version": "6.5.3", + "dev": true, + "requires": { + "bn.js": "^4.4.0", + "brorand": "^1.0.1", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.0" + } + }, + "encodeurl": { + "version": "1.0.2", + "dev": true, + "optional": true + }, + "encoding": { + "version": "0.1.13", + "dev": true, + "requires": { + "iconv-lite": "^0.6.2" + }, + "dependencies": { + "iconv-lite": { + "version": "0.6.2", + "dev": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } + } + } + }, + "encoding-down": { + "version": "5.0.4", + "dev": true, + "requires": { + "abstract-leveldown": "^5.0.0", + "inherits": "^2.0.3", + "level-codec": "^9.0.0", + "level-errors": "^2.0.0", + "xtend": "^4.0.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "5.0.0", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + } + } + }, + "end-of-stream": { + "version": "1.4.4", + "dev": true, + "requires": { + "once": "^1.4.0" + } + }, + "errno": { + "version": "0.1.8", + "dev": true, + "requires": { + "prr": "~1.0.1" + } + }, + "es-abstract": { + "version": "1.18.0-next.1", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "es5-ext": { + "version": "0.10.53", + "dev": true, + "requires": { + "es6-iterator": "~2.0.3", + "es6-symbol": "~3.1.3", + "next-tick": "~1.0.0" + } + }, + "es6-iterator": { + "version": "2.0.3", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "^0.10.35", + "es6-symbol": "^3.1.1" + } + }, + "es6-symbol": { + "version": "3.1.3", + "dev": true, + "requires": { + "d": "^1.0.1", + "ext": "^1.1.2" + } + }, + "escape-html": { + "version": "1.0.3", + "dev": true, + "optional": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "dev": true + }, + "etag": { + "version": "1.8.1", + "dev": true, + "optional": true + }, + "eth-block-tracker": { + "version": "3.0.1", + "dev": true, + "requires": { + "eth-query": "^2.1.0", + "ethereumjs-tx": 
"^1.3.3", + "ethereumjs-util": "^5.1.3", + "ethjs-util": "^0.1.3", + "json-rpc-engine": "^3.6.0", + "pify": "^2.3.0", + "tape": "^4.6.3" + }, + "dependencies": { + "ethereumjs-tx": { + "version": "1.3.7", + "dev": true, + "requires": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "pify": { + "version": "2.3.0", + "dev": true + } + } + }, + "eth-ens-namehash": { + "version": "2.0.8", + "dev": true, + "optional": true, + "requires": { + "idna-uts46-hx": "^2.3.1", + "js-sha3": "^0.5.7" + } + }, + "eth-json-rpc-infura": { + "version": "3.2.1", + "dev": true, + "requires": { + "cross-fetch": "^2.1.1", + "eth-json-rpc-middleware": "^1.5.0", + "json-rpc-engine": "^3.4.0", + "json-rpc-error": "^2.0.0" + } + }, + "eth-json-rpc-middleware": { + "version": "1.6.0", + "dev": true, + "requires": { + "async": "^2.5.0", + "eth-query": "^2.1.2", + "eth-tx-summary": "^3.1.2", + "ethereumjs-block": "^1.6.0", + "ethereumjs-tx": "^1.3.3", + "ethereumjs-util": "^5.1.2", + "ethereumjs-vm": "^2.1.0", + "fetch-ponyfill": "^4.0.0", + "json-rpc-engine": "^3.6.0", + "json-rpc-error": "^2.0.0", + "json-stable-stringify": "^1.0.1", + "promise-to-callback": "^1.0.0", + "tape": "^4.6.3" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + }, + "deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "requires": { + "abstract-leveldown": "~2.6.0" + } + }, + "ethereumjs-account": { + "version": "2.0.5", + "dev": true, + "requires": { + "ethereumjs-util": "^5.0.0", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-block": { + "version": "1.7.1", + "dev": true, + "requires": { + "async": "^2.0.1", + "ethereum-common": "0.2.0", + "ethereumjs-tx": "^1.2.2", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "ethereum-common": { + "version": "0.2.0", + "dev": true + } + } + }, + "ethereumjs-tx": { + "version": "1.3.7", + "dev": true, + "requires": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-vm": { + "version": "2.6.0", + "dev": true, + "requires": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "ethereumjs-account": "^2.0.3", + "ethereumjs-block": "~2.2.0", + "ethereumjs-common": "^1.1.0", + "ethereumjs-util": "^6.0.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1" + }, + "dependencies": { + "ethereumjs-block": { + "version": "2.2.2", + "dev": true, + "requires": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + 
"safe-buffer": "^5.1.1" + } + } + } + }, + "ethereumjs-tx": { + "version": "2.1.2", + "dev": true, + "requires": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "ethereumjs-util": { + "version": "6.2.1", + "dev": true, + "requires": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + } + } + }, + "isarray": { + "version": "0.0.1", + "dev": true + }, + "level-codec": { + "version": "7.0.1", + "dev": true + }, + "level-errors": { + "version": "1.0.5", + "dev": true, + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + } + } + }, + "level-ws": { + "version": "0.0.0", + "dev": true, + "requires": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + }, + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "xtend": { + "version": "2.1.2", + "dev": true, + "requires": { + "object-keys": "~0.4.0" + } + } + } + }, + "levelup": { + "version": "1.3.9", + "dev": true, + "requires": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "ltgt": { + "version": "2.2.1", + "dev": true + }, + "memdown": { + "version": "1.4.1", + "dev": true, + "requires": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + } + } + }, + "merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "requires": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + }, + "dependencies": { + "async": { + "version": "1.5.2", + "dev": true + } + } + }, + "object-keys": { + "version": "0.4.0", + "dev": true + }, + "safe-buffer": { + "version": "5.1.2", + "dev": true + }, + "semver": { + "version": "5.4.1", + "dev": true + }, + "string_decoder": { + "version": "0.10.31", + "dev": true + } + } + }, + "eth-lib": { + "version": "0.1.29", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.11.6", + "elliptic": "^6.4.0", + "nano-json-stream-parser": "^0.1.2", + "servify": "^0.1.12", + "ws": "^3.0.0", + "xhr-request-promise": "^0.1.2" + } + }, + "eth-query": { + "version": "2.1.2", + "dev": true, + "requires": { + "json-rpc-random-id": "^1.0.0", + "xtend": "^4.0.1" + } + }, + "eth-sig-util": { + "version": "3.0.0", + "dev": true, + "requires": { + "buffer": "^5.2.1", + "elliptic": "^6.4.0", + "ethereumjs-abi": "0.6.5", + "ethereumjs-util": "^5.1.1", + "tweetnacl": "^1.0.0", + "tweetnacl-util": "^0.15.0" + }, + "dependencies": { + "ethereumjs-abi": { + "version": "0.6.5", + "dev": true, + 
"requires": { + "bn.js": "^4.10.0", + "ethereumjs-util": "^4.3.0" + }, + "dependencies": { + "ethereumjs-util": { + "version": "4.5.1", + "dev": true, + "requires": { + "bn.js": "^4.8.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.0.0" + } + } + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "eth-tx-summary": { + "version": "3.2.4", + "dev": true, + "requires": { + "async": "^2.1.2", + "clone": "^2.0.0", + "concat-stream": "^1.5.1", + "end-of-stream": "^1.1.0", + "eth-query": "^2.0.2", + "ethereumjs-block": "^1.4.1", + "ethereumjs-tx": "^1.1.1", + "ethereumjs-util": "^5.0.1", + "ethereumjs-vm": "^2.6.0", + "through2": "^2.0.3" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + }, + "deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "requires": { + "abstract-leveldown": "~2.6.0" + } + }, + "ethereumjs-account": { + "version": "2.0.5", + "dev": true, + "requires": { + "ethereumjs-util": "^5.0.0", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-block": { + "version": "1.7.1", + "dev": true, + "requires": { + "async": "^2.0.1", + "ethereum-common": "0.2.0", + "ethereumjs-tx": "^1.2.2", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "ethereum-common": { + "version": "0.2.0", + "dev": true + } + } + }, + "ethereumjs-tx": { + "version": "1.3.7", + "dev": true, + "requires": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-vm": { + "version": "2.6.0", + "dev": true, + "requires": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "ethereumjs-account": "^2.0.3", + "ethereumjs-block": "~2.2.0", + "ethereumjs-common": "^1.1.0", + "ethereumjs-util": "^6.0.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1" + }, + "dependencies": { + "ethereumjs-block": { + "version": "2.2.2", + "dev": true, + "requires": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "ethereumjs-tx": { + "version": "2.1.2", + "dev": true, + "requires": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "ethereumjs-util": { + "version": "6.2.1", + "dev": true, + "requires": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + } + } + }, + "isarray": { + "version": "0.0.1", + "dev": true + }, + "level-codec": { + 
"version": "7.0.1", + "dev": true + }, + "level-errors": { + "version": "1.0.5", + "dev": true, + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + } + } + }, + "level-ws": { + "version": "0.0.0", + "dev": true, + "requires": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + }, + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "xtend": { + "version": "2.1.2", + "dev": true, + "requires": { + "object-keys": "~0.4.0" + } + } + } + }, + "levelup": { + "version": "1.3.9", + "dev": true, + "requires": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "ltgt": { + "version": "2.2.1", + "dev": true + }, + "memdown": { + "version": "1.4.1", + "dev": true, + "requires": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + } + } + }, + "merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "requires": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + }, + "dependencies": { + "async": { + "version": "1.5.2", + "dev": true + } + } + }, + "object-keys": { + "version": "0.4.0", + "dev": true + }, + "safe-buffer": { + "version": "5.1.2", + "dev": true + }, + "semver": { + "version": "5.4.1", + "dev": true + }, + "string_decoder": { + "version": "0.10.31", + "dev": true + } + } + }, + "ethashjs": { + "version": "0.0.8", + "dev": true, + "requires": { + "async": "^2.1.2", + "buffer-xor": "^2.0.1", + "ethereumjs-util": "^7.0.2", + "miller-rabin": "^4.0.0" + }, + "dependencies": { + "bn.js": { + "version": "5.1.3", + "dev": true + }, + "buffer-xor": { + "version": "2.0.2", + "dev": true, + "requires": { + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-util": { + "version": "7.0.7", + "dev": true, + "requires": { + "@types/bn.js": "^4.11.3", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.4" + } + } + } + }, + "ethereum-bloom-filters": { + "version": "1.0.7", + "dev": true, + "optional": true, + "requires": { + "js-sha3": "^0.8.0" + }, + "dependencies": { + "js-sha3": { + "version": "0.8.0", + "dev": true, + "optional": true + } + } + }, + "ethereum-common": { + "version": "0.0.18", + "dev": true + }, + "ethereum-cryptography": { + "version": "0.1.3", + "dev": true, + "requires": { + "@types/pbkdf2": "^3.0.0", + "@types/secp256k1": "^4.0.1", + "blakejs": "^1.1.0", + "browserify-aes": "^1.2.0", + "bs58check": "^2.1.2", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "hash.js": "^1.1.7", + "keccak": "^3.0.0", + "pbkdf2": "^3.0.17", 
+ "randombytes": "^2.1.0", + "safe-buffer": "^5.1.2", + "scrypt-js": "^3.0.0", + "secp256k1": "^4.0.1", + "setimmediate": "^1.0.5" + } + }, + "ethereumjs-abi": { + "version": "0.6.8", + "dev": true, + "requires": { + "bn.js": "^4.11.8", + "ethereumjs-util": "^6.0.0" + } + }, + "ethereumjs-account": { + "version": "3.0.0", + "dev": true, + "requires": { + "ethereumjs-util": "^6.0.0", + "rlp": "^2.2.1", + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-block": { + "version": "2.2.2", + "dev": true, + "requires": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + }, + "deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "requires": { + "abstract-leveldown": "~2.6.0" + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "isarray": { + "version": "0.0.1", + "dev": true + }, + "level-codec": { + "version": "7.0.1", + "dev": true + }, + "level-errors": { + "version": "1.0.5", + "dev": true, + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + } + } + }, + "level-ws": { + "version": "0.0.0", + "dev": true, + "requires": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + }, + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "xtend": { + "version": "2.1.2", + "dev": true, + "requires": { + "object-keys": "~0.4.0" + } + } + } + }, + "levelup": { + "version": "1.3.9", + "dev": true, + "requires": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "ltgt": { + "version": "2.2.1", + "dev": true + }, + "memdown": { + "version": "1.4.1", + "dev": true, + "requires": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + } + } + }, + "merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "requires": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + }, + "dependencies": { + "async": { + "version": "1.5.2", + "dev": true + } + } + }, + "object-keys": { + "version": "0.4.0", + "dev": true + }, + "safe-buffer": { + "version": "5.1.2", + "dev": true + }, + "semver": { + "version": "5.4.1", + "dev": true + }, + "string_decoder": { + "version": "0.10.31", + "dev": true + 
} + } + }, + "ethereumjs-blockchain": { + "version": "4.0.4", + "dev": true, + "requires": { + "async": "^2.6.1", + "ethashjs": "~0.0.7", + "ethereumjs-block": "~2.2.2", + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.1.0", + "flow-stoplight": "^1.0.0", + "level-mem": "^3.0.1", + "lru-cache": "^5.1.1", + "rlp": "^2.2.2", + "semaphore": "^1.1.0" + } + }, + "ethereumjs-common": { + "version": "1.5.0", + "dev": true + }, + "ethereumjs-tx": { + "version": "2.1.2", + "dev": true, + "requires": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "ethereumjs-util": { + "version": "6.2.1", + "dev": true, + "requires": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + }, + "ethereumjs-vm": { + "version": "4.2.0", + "dev": true, + "requires": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "core-js-pure": "^3.0.1", + "ethereumjs-account": "^3.0.0", + "ethereumjs-block": "^2.2.2", + "ethereumjs-blockchain": "^4.0.3", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.2", + "ethereumjs-util": "^6.2.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1", + "util.promisify": "^1.0.0" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + }, + "deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "requires": { + "abstract-leveldown": "~2.6.0" + } + }, + "isarray": { + "version": "0.0.1", + "dev": true + }, + "level-codec": { + "version": "7.0.1", + "dev": true + }, + "level-errors": { + "version": "1.0.5", + "dev": true, + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + } + } + }, + "level-ws": { + "version": "0.0.0", + "dev": true, + "requires": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + }, + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "xtend": { + "version": "2.1.2", + "dev": true, + "requires": { + "object-keys": "~0.4.0" + } + } + } + }, + "levelup": { + "version": "1.3.9", + "dev": true, + "requires": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "ltgt": { + "version": "2.2.1", + "dev": true + }, + "memdown": { + "version": "1.4.1", + "dev": true, + "requires": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + } + } + }, + "merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "requires": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + 
"level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + }, + "dependencies": { + "async": { + "version": "1.5.2", + "dev": true + }, + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "object-keys": { + "version": "0.4.0", + "dev": true + }, + "safe-buffer": { + "version": "5.1.2", + "dev": true + }, + "semver": { + "version": "5.4.1", + "dev": true + }, + "string_decoder": { + "version": "0.10.31", + "dev": true + } + } + }, + "ethereumjs-wallet": { + "version": "0.6.5", + "dev": true, + "optional": true, + "requires": { + "aes-js": "^3.1.1", + "bs58check": "^2.1.2", + "ethereum-cryptography": "^0.1.3", + "ethereumjs-util": "^6.0.0", + "randombytes": "^2.0.6", + "safe-buffer": "^5.1.2", + "scryptsy": "^1.2.1", + "utf8": "^3.0.0", + "uuid": "^3.3.2" + } + }, + "ethjs-unit": { + "version": "0.1.6", + "dev": true, + "optional": true, + "requires": { + "bn.js": "4.11.6", + "number-to-bn": "1.7.0" + }, + "dependencies": { + "bn.js": { + "version": "4.11.6", + "dev": true, + "optional": true + } + } + }, + "ethjs-util": { + "version": "0.1.6", + "dev": true, + "requires": { + "is-hex-prefixed": "1.0.0", + "strip-hex-prefix": "1.0.0" + } + }, + "eventemitter3": { + "version": "4.0.4", + "dev": true, + "optional": true + }, + "events": { + "version": "3.2.0", + "dev": true + }, + "evp_bytestokey": { + "version": "1.0.3", + "dev": true, + "requires": { + "md5.js": "^1.3.4", + "safe-buffer": "^5.1.1" + } + }, + "expand-brackets": { + "version": "2.1.4", + "dev": true, + "requires": { + "debug": "^2.3.3", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "posix-character-classes": "^0.1.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "define-property": { + "version": "0.2.5", + "dev": true, + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + } + }, + "is-extendable": { + "version": "0.1.1", + "dev": true + }, + "kind-of": { + "version": "5.1.0", + "dev": true + }, + "ms": { + "version": "2.0.0", + "dev": true + } + } + }, + "express": { + "version": "4.17.1", + "dev": true, + "optional": true, + "requires": { + "accepts": "~1.3.7", + "array-flatten": "1.1.1", + "body-parser": "1.19.0", + "content-disposition": "0.5.3", + "content-type": "~1.0.4", + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": 
"~1.1.2", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "~1.1.2", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.5", + "qs": "6.7.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.1.2", + "send": "0.17.1", + "serve-static": "1.14.1", + "setprototypeof": "1.1.1", + "statuses": "~1.5.0", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "optional": true, + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "dev": true, + "optional": true + }, + "qs": { + "version": "6.7.0", + "dev": true, + "optional": true + }, + "safe-buffer": { + "version": "5.1.2", + "dev": true, + "optional": true + } + } + }, + "ext": { + "version": "1.4.0", + "dev": true, + "requires": { + "type": "^2.0.0" + }, + "dependencies": { + "type": { + "version": "2.1.0", + "dev": true + } + } + }, + "extend": { + "version": "3.0.2", + "dev": true + }, + "extend-shallow": { + "version": "3.0.2", + "dev": true, + "requires": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + } + }, + "extglob": { + "version": "2.0.4", + "dev": true, + "requires": { + "array-unique": "^0.3.2", + "define-property": "^1.0.0", + "expand-brackets": "^2.1.4", + "extend-shallow": "^2.0.1", + "fragment-cache": "^0.2.1", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "dev": true, + "requires": { + "is-descriptor": "^1.0.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "is-extendable": { + "version": "0.1.1", + "dev": true + } + } + }, + "extsprintf": { + "version": "1.3.0", + "dev": true + }, + "fake-merkle-patricia-tree": { + "version": "1.0.1", + "dev": true, + "requires": { + "checkpoint-store": "^1.1.0" + } + }, + "fast-deep-equal": { + "version": "3.1.3", + "dev": true + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "dev": true + }, + "fetch-ponyfill": { + "version": "4.1.0", + "dev": true, + "requires": { + "node-fetch": "~1.7.1" + }, + "dependencies": { + "is-stream": { + "version": "1.1.0", + "dev": true + }, + "node-fetch": { + "version": "1.7.3", + "dev": true, + "requires": { + "encoding": "^0.1.11", + "is-stream": "^1.0.1" + } + } + } + }, + "finalhandler": { + "version": "1.1.2", + "dev": true, + "optional": true, + "requires": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", + "unpipe": "~1.0.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "optional": true, + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "dev": true, + "optional": true + } + } + }, + "find-yarn-workspace-root": { + "version": "1.2.1", + "dev": true, + "requires": { + "fs-extra": "^4.0.3", + "micromatch": "^3.1.4" + }, + "dependencies": { + "braces": { + "version": "2.3.2", + "dev": true, + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": 
"2.0.1", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "fill-range": { + "version": "4.0.0", + "dev": true, + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "fs-extra": { + "version": "4.0.3", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "is-extendable": { + "version": "0.1.1", + "dev": true + }, + "is-number": { + "version": "3.0.0", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "micromatch": { + "version": "3.1.10", + "dev": true, + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "to-regex-range": { + "version": "2.1.1", + "dev": true, + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } + } + } + }, + "flow-stoplight": { + "version": "1.0.0", + "dev": true + }, + "for-each": { + "version": "0.3.3", + "dev": true, + "requires": { + "is-callable": "^1.1.3" + } + }, + "for-in": { + "version": "1.0.2", + "dev": true + }, + "forever-agent": { + "version": "0.6.1", + "dev": true + }, + "form-data": { + "version": "2.3.3", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "forwarded": { + "version": "0.1.2", + "dev": true, + "optional": true + }, + "fragment-cache": { + "version": "0.2.1", + "dev": true, + "requires": { + "map-cache": "^0.2.2" + } + }, + "fresh": { + "version": "0.5.2", + "dev": true, + "optional": true + }, + "fs-extra": { + "version": "7.0.1", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "fs.realpath": { + "version": "1.0.0", + "dev": true + }, + "function-bind": { + "version": "1.1.1", + "dev": true + }, + "functional-red-black-tree": { + "version": "1.0.1", + "dev": true + }, + "get-intrinsic": { + "version": "1.0.2", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } + }, + "get-stream": { + "version": "5.2.0", + "dev": true, + "optional": true, + "requires": { + "pump": "^3.0.0" + } + }, + "get-value": { + "version": "2.0.6", + "dev": true + }, + "getpass": { + "version": "0.1.7", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, + "glob": { + "version": "7.1.3", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "global": { + "version": "4.4.0", + "dev": true, + "requires": { + "min-document": "^2.19.0", + "process": "^0.11.10" + } + }, + "got": { + "version": "9.6.0", + "dev": true, + "optional": true, + "requires": { + "@sindresorhus/is": "^0.14.0", + "@szmarczak/http-timer": "^1.1.2", + "cacheable-request": "^6.0.0", + "decompress-response": "^3.3.0", + 
"duplexer3": "^0.1.4", + "get-stream": "^4.1.0", + "lowercase-keys": "^1.0.1", + "mimic-response": "^1.0.1", + "p-cancelable": "^1.0.0", + "to-readable-stream": "^1.0.0", + "url-parse-lax": "^3.0.0" + }, + "dependencies": { + "get-stream": { + "version": "4.1.0", + "dev": true, + "optional": true, + "requires": { + "pump": "^3.0.0" + } + } + } + }, + "graceful-fs": { + "version": "4.2.4", + "dev": true + }, + "har-schema": { + "version": "2.0.0", + "dev": true + }, + "har-validator": { + "version": "5.1.5", + "dev": true, + "requires": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + } + }, + "has": { + "version": "1.0.3", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-ansi": { + "version": "2.0.0", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "dev": true + } + } + }, + "has-flag": { + "version": "3.0.0", + "dev": true + }, + "has-symbol-support-x": { + "version": "1.4.2", + "dev": true, + "optional": true + }, + "has-symbols": { + "version": "1.0.1", + "dev": true + }, + "has-to-string-tag-x": { + "version": "1.4.1", + "dev": true, + "optional": true, + "requires": { + "has-symbol-support-x": "^1.4.1" + } + }, + "has-value": { + "version": "1.0.0", + "dev": true, + "requires": { + "get-value": "^2.0.6", + "has-values": "^1.0.0", + "isobject": "^3.0.0" + } + }, + "has-values": { + "version": "1.0.0", + "dev": true, + "requires": { + "is-number": "^3.0.0", + "kind-of": "^4.0.0" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "is-number": { + "version": "3.0.0", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "4.0.0", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "hash-base": { + "version": "3.1.0", + "dev": true, + "requires": { + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "hash.js": { + "version": "1.1.7", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "heap": { + "version": "0.2.6", + "dev": true + }, + "hmac-drbg": { + "version": "1.0.1", + "dev": true, + "requires": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "home-or-tmp": { + "version": "2.0.0", + "dev": true, + "requires": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.1" + } + }, + "http-cache-semantics": { + "version": "4.1.0", + "dev": true, + "optional": true + }, + "http-errors": { + "version": "1.7.2", + "dev": true, + "optional": true, + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + }, + "dependencies": { + "inherits": { + "version": "2.0.3", + "dev": true, + "optional": true + } + } + }, + "http-https": { + "version": "1.0.0", + "dev": true, + "optional": true + }, + "http-signature": { + "version": "1.2.0", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } + }, + "iconv-lite": { + "version": "0.4.24", + "dev": true, + "optional": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + 
}, + "idna-uts46-hx": { + "version": "2.3.1", + "dev": true, + "optional": true, + "requires": { + "punycode": "2.1.0" + }, + "dependencies": { + "punycode": { + "version": "2.1.0", + "dev": true, + "optional": true + } + } + }, + "ieee754": { + "version": "1.2.1", + "dev": true + }, + "immediate": { + "version": "3.2.3", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "dev": true + }, + "invariant": { + "version": "2.2.4", + "dev": true, + "requires": { + "loose-envify": "^1.0.0" + } + }, + "ipaddr.js": { + "version": "1.9.1", + "dev": true, + "optional": true + }, + "is-accessor-descriptor": { + "version": "1.0.0", + "dev": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-arguments": { + "version": "1.1.0", + "dev": true, + "requires": { + "call-bind": "^1.0.0" + } + }, + "is-callable": { + "version": "1.2.2", + "dev": true + }, + "is-ci": { + "version": "2.0.0", + "dev": true, + "requires": { + "ci-info": "^2.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "dev": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-date-object": { + "version": "1.0.2", + "dev": true + }, + "is-descriptor": { + "version": "1.0.2", + "dev": true, + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + }, + "is-extendable": { + "version": "1.0.1", + "dev": true, + "requires": { + "is-plain-object": "^2.0.4" + } + }, + "is-finite": { + "version": "1.1.0", + "dev": true + }, + "is-fn": { + "version": "1.0.0", + "dev": true + }, + "is-function": { + "version": "1.0.2", + "dev": true + }, + "is-hex-prefixed": { + "version": "1.0.0", + "dev": true + }, + "is-negative-zero": { + "version": "2.0.1", + "dev": true + }, + "is-object": { + "version": "1.0.2", + "dev": true, + "optional": true + }, + "is-plain-obj": { + "version": "1.1.0", + "dev": true, + "optional": true + }, + "is-plain-object": { + "version": "2.0.4", + "dev": true, + "requires": { + "isobject": "^3.0.1" + } + }, + "is-regex": { + "version": "1.1.1", + "dev": true, + "requires": { + "has-symbols": "^1.0.1" + } + }, + "is-retry-allowed": { + "version": "1.2.0", + "dev": true, + "optional": true + }, + "is-symbol": { + "version": "1.0.3", + "dev": true, + "requires": { + "has-symbols": "^1.0.1" + } + }, + "is-typedarray": { + "version": "1.0.0", + "dev": true + }, + "is-windows": { + "version": "1.0.2", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "dev": true + }, + "isobject": { + "version": "3.0.1", + "dev": true + }, + "isstream": { + "version": "0.1.2", + "dev": true + }, + "isurl": { + "version": "1.0.0", + "dev": true, + "optional": true, + "requires": { + "has-to-string-tag-x": "^1.2.0", + "is-object": "^1.0.1" + } + }, + "js-sha3": { + "version": "0.5.7", + "dev": true, + "optional": true + }, + "js-tokens": { + "version": "4.0.0", + "dev": true + }, + "jsbn": { + "version": "0.1.1", + "dev": true + }, + "json-buffer": { + "version": "3.0.0", + "dev": true, + "optional": true + }, + "json-rpc-engine": { + "version": "3.8.0", + "dev": true, + "requires": { + "async": "^2.0.1", + "babel-preset-env": "^1.7.0", + "babelify": "^7.3.0", + "json-rpc-error": "^2.0.0", + "promise-to-callback": "^1.0.0", + "safe-event-emitter": "^1.0.1" + } + }, + "json-rpc-error": { + "version": "2.0.0", + "dev": true, + "requires": { + "inherits": "^2.0.1" + } + }, + 
"json-rpc-random-id": { + "version": "1.0.1", + "dev": true + }, + "json-schema": { + "version": "0.2.3", + "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "dev": true + }, + "json-stable-stringify": { + "version": "1.0.1", + "dev": true, + "requires": { + "jsonify": "~0.0.0" + } + }, + "json-stringify-safe": { + "version": "5.0.1", + "dev": true + }, + "jsonfile": { + "version": "4.0.0", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6" + } + }, + "jsonify": { + "version": "0.0.0", + "dev": true + }, + "jsprim": { + "version": "1.4.1", + "dev": true, + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } + }, + "keccak": { + "version": "3.0.1", + "bundled": true, + "dev": true, + "requires": { + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0" + } + }, + "keyv": { + "version": "3.1.0", + "dev": true, + "optional": true, + "requires": { + "json-buffer": "3.0.0" + } + }, + "kind-of": { + "version": "6.0.3", + "dev": true + }, + "klaw-sync": { + "version": "6.0.0", + "dev": true, + "requires": { + "graceful-fs": "^4.1.11" + } + }, + "level-codec": { + "version": "9.0.2", + "dev": true, + "requires": { + "buffer": "^5.6.0" + } + }, + "level-errors": { + "version": "2.0.1", + "dev": true, + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "2.0.3", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.5", + "xtend": "^4.0.0" + } + }, + "level-mem": { + "version": "3.0.1", + "dev": true, + "requires": { + "level-packager": "~4.0.0", + "memdown": "~3.0.0" + }, + "dependencies": { + "abstract-leveldown": { + "version": "5.0.0", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + }, + "ltgt": { + "version": "2.2.1", + "dev": true + }, + "memdown": { + "version": "3.0.0", + "dev": true, + "requires": { + "abstract-leveldown": "~5.0.0", + "functional-red-black-tree": "~1.0.1", + "immediate": "~3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "dev": true + } + } + }, + "level-packager": { + "version": "4.0.1", + "dev": true, + "requires": { + "encoding-down": "~5.0.0", + "levelup": "^3.0.0" + } + }, + "level-post": { + "version": "1.0.7", + "dev": true, + "requires": { + "ltgt": "^2.1.2" + } + }, + "level-sublevel": { + "version": "6.6.4", + "dev": true, + "requires": { + "bytewise": "~1.1.0", + "level-codec": "^9.0.0", + "level-errors": "^2.0.0", + "level-iterator-stream": "^2.0.3", + "ltgt": "~2.1.1", + "pull-defer": "^0.2.2", + "pull-level": "^2.0.3", + "pull-stream": "^3.6.8", + "typewiselite": "~1.0.0", + "xtend": "~4.0.0" + } + }, + "level-ws": { + "version": "1.0.0", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "readable-stream": "^2.2.8", + "xtend": "^4.0.1" + } + }, + "levelup": { + "version": "3.1.1", + "dev": true, + "requires": { + "deferred-leveldown": "~4.0.0", + "level-errors": "~2.0.0", + "level-iterator-stream": "~3.0.0", + "xtend": "~4.0.0" + }, + "dependencies": { + "level-iterator-stream": { + "version": "3.0.1", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "readable-stream": "^2.3.6", + "xtend": "^4.0.0" + } + } + } + }, + "lodash": { + "version": "4.17.20", + "dev": true + }, + "looper": { + "version": "2.0.0", + "dev": true + }, + "loose-envify": { + "version": "1.4.0", + "dev": true, + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, + "lowercase-keys": { + "version": "1.0.1", + "dev": true, + 
"optional": true + }, + "lru-cache": { + "version": "5.1.1", + "dev": true, + "requires": { + "yallist": "^3.0.2" + } + }, + "ltgt": { + "version": "2.1.3", + "dev": true + }, + "map-cache": { + "version": "0.2.2", + "dev": true + }, + "map-visit": { + "version": "1.0.0", + "dev": true, + "requires": { + "object-visit": "^1.0.0" + } + }, + "md5.js": { + "version": "1.3.5", + "dev": true, + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "media-typer": { + "version": "0.3.0", + "dev": true, + "optional": true + }, + "merge-descriptors": { + "version": "1.0.1", + "dev": true, + "optional": true + }, + "merkle-patricia-tree": { + "version": "3.0.0", + "dev": true, + "requires": { + "async": "^2.6.1", + "ethereumjs-util": "^5.2.0", + "level-mem": "^3.0.1", + "level-ws": "^1.0.0", + "readable-stream": "^3.0.6", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + }, + "dependencies": { + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "readable-stream": { + "version": "3.6.0", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "methods": { + "version": "1.1.2", + "dev": true, + "optional": true + }, + "miller-rabin": { + "version": "4.0.1", + "dev": true, + "requires": { + "bn.js": "^4.0.0", + "brorand": "^1.0.1" + } + }, + "mime": { + "version": "1.6.0", + "dev": true, + "optional": true + }, + "mime-db": { + "version": "1.45.0", + "dev": true + }, + "mime-types": { + "version": "2.1.28", + "dev": true, + "requires": { + "mime-db": "1.45.0" + } + }, + "mimic-response": { + "version": "1.0.1", + "dev": true, + "optional": true + }, + "min-document": { + "version": "2.19.0", + "dev": true, + "requires": { + "dom-walk": "^0.1.0" + } + }, + "minimalistic-assert": { + "version": "1.0.1", + "dev": true + }, + "minimalistic-crypto-utils": { + "version": "1.0.1", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.5", + "dev": true + }, + "minizlib": { + "version": "1.3.3", + "dev": true, + "optional": true, + "requires": { + "minipass": "^2.9.0" + }, + "dependencies": { + "minipass": { + "version": "2.9.0", + "dev": true, + "optional": true, + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + } + } + }, + "mixin-deep": { + "version": "1.3.2", + "dev": true, + "requires": { + "for-in": "^1.0.2", + "is-extendable": "^1.0.1" + } + }, + "mkdirp": { + "version": "0.5.5", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "mkdirp-promise": { + "version": "5.0.1", + "dev": true, + "optional": true, + "requires": { + "mkdirp": "*" + } + }, + "mock-fs": { + "version": "4.13.0", + "dev": true, + "optional": true + }, + "ms": { + "version": "2.1.3", + "dev": true + }, + "multibase": { + "version": "0.6.1", + "dev": true, + "optional": true, + "requires": { + "base-x": "^3.0.8", + "buffer": "^5.5.0" + } + }, + "multicodec": { + "version": "0.5.7", + "dev": true, + "optional": true, + "requires": { + "varint": "^5.0.0" + } + }, + "multihashes": { + "version": "0.4.21", + "dev": true, + "optional": true, + "requires": { + "buffer": "^5.5.0", + "multibase": "^0.7.0", + "varint": "^5.0.0" + }, + "dependencies": { + "multibase": { + 
"version": "0.7.0", + "dev": true, + "optional": true, + "requires": { + "base-x": "^3.0.8", + "buffer": "^5.5.0" + } + } + } + }, + "nano-json-stream-parser": { + "version": "0.1.2", + "dev": true, + "optional": true + }, + "nanomatch": { + "version": "1.2.13", + "dev": true, + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "fragment-cache": "^0.2.1", + "is-windows": "^1.0.2", + "kind-of": "^6.0.2", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + } + }, + "negotiator": { + "version": "0.6.2", + "dev": true, + "optional": true + }, + "next-tick": { + "version": "1.0.0", + "dev": true + }, + "nice-try": { + "version": "1.0.5", + "dev": true + }, + "node-addon-api": { + "version": "2.0.2", + "bundled": true, + "dev": true + }, + "node-fetch": { + "version": "2.1.2", + "dev": true + }, + "node-gyp-build": { + "version": "4.2.3", + "bundled": true, + "dev": true + }, + "normalize-url": { + "version": "4.5.0", + "dev": true, + "optional": true + }, + "number-to-bn": { + "version": "1.7.0", + "dev": true, + "optional": true, + "requires": { + "bn.js": "4.11.6", + "strip-hex-prefix": "1.0.0" + }, + "dependencies": { + "bn.js": { + "version": "4.11.6", + "dev": true, + "optional": true + } + } + }, + "oauth-sign": { + "version": "0.9.0", + "dev": true + }, + "object-assign": { + "version": "4.1.1", + "dev": true + }, + "object-copy": { + "version": "0.1.0", + "dev": true, + "requires": { + "copy-descriptor": "^0.1.0", + "define-property": "^0.2.5", + "kind-of": "^3.0.3" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "dev": true, + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + } + }, + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + } + }, + "is-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "dependencies": { + "kind-of": { + "version": "5.1.0", + "dev": true + } + } + }, + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "object-inspect": { + "version": "1.9.0", + "dev": true + }, + "object-is": { + "version": "1.1.4", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + } + }, + "object-keys": { + "version": "1.1.1", + "dev": true + }, + "object-visit": { + "version": "1.0.1", + "dev": true, + "requires": { + "isobject": "^3.0.0" + } + }, + "object.assign": { + "version": "4.1.2", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + } + }, + "object.getownpropertydescriptors": { + "version": "2.1.1", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.1" + } + }, + "object.pick": { + "version": "1.3.0", + "dev": true, + "requires": { + "isobject": "^3.0.1" + } + }, + "oboe": { + "version": "2.1.4", + "dev": true, + "optional": true, + "requires": { + "http-https": "^1.0.0" + } + }, + "on-finished": { + "version": "2.3.0", + "dev": true, + "optional": true, + "requires": { + "ee-first": "1.1.1" + } + }, + "once": { + "version": "1.4.0", + "dev": 
true, + "requires": { + "wrappy": "1" + } + }, + "os-homedir": { + "version": "1.0.2", + "dev": true + }, + "os-tmpdir": { + "version": "1.0.2", + "dev": true + }, + "p-cancelable": { + "version": "1.1.0", + "dev": true, + "optional": true + }, + "p-timeout": { + "version": "1.2.1", + "dev": true, + "optional": true, + "requires": { + "p-finally": "^1.0.0" + }, + "dependencies": { + "p-finally": { + "version": "1.0.0", + "dev": true, + "optional": true + } + } + }, + "parse-asn1": { + "version": "5.1.6", + "dev": true, + "optional": true, + "requires": { + "asn1.js": "^5.2.0", + "browserify-aes": "^1.0.0", + "evp_bytestokey": "^1.0.0", + "pbkdf2": "^3.0.3", + "safe-buffer": "^5.1.1" + } + }, + "parse-headers": { + "version": "2.0.3", + "dev": true + }, + "parseurl": { + "version": "1.3.3", + "dev": true, + "optional": true + }, + "pascalcase": { + "version": "0.1.1", + "dev": true + }, + "patch-package": { + "version": "6.2.2", + "dev": true, + "requires": { + "@yarnpkg/lockfile": "^1.1.0", + "chalk": "^2.4.2", + "cross-spawn": "^6.0.5", + "find-yarn-workspace-root": "^1.2.1", + "fs-extra": "^7.0.1", + "is-ci": "^2.0.0", + "klaw-sync": "^6.0.0", + "minimist": "^1.2.0", + "rimraf": "^2.6.3", + "semver": "^5.6.0", + "slash": "^2.0.0", + "tmp": "^0.0.33" + }, + "dependencies": { + "cross-spawn": { + "version": "6.0.5", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "path-key": { + "version": "2.0.1", + "dev": true + }, + "semver": { + "version": "5.7.1", + "dev": true + }, + "shebang-command": { + "version": "1.2.0", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "dev": true + }, + "slash": { + "version": "2.0.0", + "dev": true + }, + "tmp": { + "version": "0.0.33", + "dev": true, + "requires": { + "os-tmpdir": "~1.0.2" + } + }, + "which": { + "version": "1.3.1", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + } + } + }, + "path-is-absolute": { + "version": "1.0.1", + "dev": true + }, + "path-parse": { + "version": "1.0.6", + "dev": true + }, + "path-to-regexp": { + "version": "0.1.7", + "dev": true, + "optional": true + }, + "pbkdf2": { + "version": "3.1.1", + "dev": true, + "requires": { + "create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "performance-now": { + "version": "2.1.0", + "dev": true + }, + "posix-character-classes": { + "version": "0.1.1", + "dev": true + }, + "precond": { + "version": "0.2.3", + "dev": true + }, + "prepend-http": { + "version": "2.0.0", + "dev": true, + "optional": true + }, + "private": { + "version": "0.1.8", + "dev": true + }, + "process": { + "version": "0.11.10", + "dev": true + }, + "process-nextick-args": { + "version": "2.0.1", + "dev": true + }, + "promise-to-callback": { + "version": "1.0.0", + "dev": true, + "requires": { + "is-fn": "^1.0.0", + "set-immediate-shim": "^1.0.1" + } + }, + "proxy-addr": { + "version": "2.0.6", + "dev": true, + "optional": true, + "requires": { + "forwarded": "~0.1.2", + "ipaddr.js": "1.9.1" + } + }, + "prr": { + "version": "1.0.1", + "dev": true + }, + "pseudomap": { + "version": "1.0.2", + "dev": true + }, + "psl": { + "version": "1.8.0", + "dev": true + }, + "public-encrypt": { + "version": "4.0.3", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.1.0", + "browserify-rsa": "^4.0.0", + "create-hash": "^1.1.0", + "parse-asn1": 
"^5.0.0", + "randombytes": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "pull-cat": { + "version": "1.1.11", + "dev": true + }, + "pull-defer": { + "version": "0.2.3", + "dev": true + }, + "pull-level": { + "version": "2.0.4", + "dev": true, + "requires": { + "level-post": "^1.0.7", + "pull-cat": "^1.1.9", + "pull-live": "^1.0.1", + "pull-pushable": "^2.0.0", + "pull-stream": "^3.4.0", + "pull-window": "^2.1.4", + "stream-to-pull-stream": "^1.7.1" + } + }, + "pull-live": { + "version": "1.0.1", + "dev": true, + "requires": { + "pull-cat": "^1.1.9", + "pull-stream": "^3.4.0" + } + }, + "pull-pushable": { + "version": "2.2.0", + "dev": true + }, + "pull-stream": { + "version": "3.6.14", + "dev": true + }, + "pull-window": { + "version": "2.1.4", + "dev": true, + "requires": { + "looper": "^2.0.0" + } + }, + "pump": { + "version": "3.0.0", + "dev": true, + "optional": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "punycode": { + "version": "2.1.1", + "dev": true + }, + "qs": { + "version": "6.5.2", + "dev": true + }, + "query-string": { + "version": "5.1.1", + "dev": true, + "optional": true, + "requires": { + "decode-uri-component": "^0.2.0", + "object-assign": "^4.1.0", + "strict-uri-encode": "^1.0.0" + } + }, + "randombytes": { + "version": "2.1.0", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "randomfill": { + "version": "1.0.4", + "dev": true, + "optional": true, + "requires": { + "randombytes": "^2.0.5", + "safe-buffer": "^5.1.0" + } + }, + "range-parser": { + "version": "1.2.1", + "dev": true, + "optional": true + }, + "raw-body": { + "version": "2.4.0", + "dev": true, + "optional": true, + "requires": { + "bytes": "3.1.0", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + } + }, + "readable-stream": { + "version": "2.3.7", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "dev": true + } + } + }, + "regenerate": { + "version": "1.4.2", + "dev": true + }, + "regenerator-runtime": { + "version": "0.11.1", + "dev": true + }, + "regenerator-transform": { + "version": "0.10.1", + "dev": true, + "requires": { + "babel-runtime": "^6.18.0", + "babel-types": "^6.19.0", + "private": "^0.1.6" + } + }, + "regex-not": { + "version": "1.0.2", + "dev": true, + "requires": { + "extend-shallow": "^3.0.2", + "safe-regex": "^1.1.0" + } + }, + "regexp.prototype.flags": { + "version": "1.3.0", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.7", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } + } + }, + "regexpu-core": { + "version": "2.0.0", + "dev": true, + "requires": { + "regenerate": "^1.2.1", + "regjsgen": "^0.2.0", + "regjsparser": "^0.1.4" + } + }, + "regjsgen": { + "version": "0.2.0", + "dev": true + }, + "regjsparser": { + "version": "0.1.5", + "dev": true, + "requires": { + "jsesc": "~0.5.0" + }, + "dependencies": { + "jsesc": { + "version": "0.5.0", + 
"dev": true + } + } + }, + "repeat-element": { + "version": "1.1.3", + "dev": true + }, + "repeat-string": { + "version": "1.6.1", + "dev": true + }, + "repeating": { + "version": "2.0.1", + "dev": true, + "requires": { + "is-finite": "^1.0.0" + } + }, + "request": { + "version": "2.88.2", + "dev": true, + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + } + }, + "resolve-url": { + "version": "0.2.1", + "dev": true + }, + "responselike": { + "version": "1.0.2", + "dev": true, + "optional": true, + "requires": { + "lowercase-keys": "^1.0.0" + } + }, + "resumer": { + "version": "0.0.0", + "dev": true, + "requires": { + "through": "~2.3.4" + } + }, + "ret": { + "version": "0.1.15", + "dev": true + }, + "rimraf": { + "version": "2.6.3", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "ripemd160": { + "version": "2.0.2", + "dev": true, + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "rlp": { + "version": "2.2.6", + "dev": true, + "requires": { + "bn.js": "^4.11.1" + } + }, + "rustbn.js": { + "version": "0.2.0", + "dev": true + }, + "safe-buffer": { + "version": "5.2.1", + "dev": true + }, + "safe-event-emitter": { + "version": "1.0.1", + "dev": true, + "requires": { + "events": "^3.0.0" + } + }, + "safe-regex": { + "version": "1.1.0", + "dev": true, + "requires": { + "ret": "~0.1.10" + } + }, + "safer-buffer": { + "version": "2.1.2", + "dev": true + }, + "scrypt-js": { + "version": "3.0.1", + "dev": true + }, + "scryptsy": { + "version": "1.2.1", + "dev": true, + "optional": true, + "requires": { + "pbkdf2": "^3.0.3" + } + }, + "secp256k1": { + "version": "4.0.2", + "dev": true, + "requires": { + "elliptic": "^6.5.2", + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0" + } + }, + "seedrandom": { + "version": "3.0.1", + "dev": true + }, + "semaphore": { + "version": "1.1.0", + "dev": true + }, + "send": { + "version": "0.17.1", + "dev": true, + "optional": true, + "requires": { + "debug": "2.6.9", + "depd": "~1.1.2", + "destroy": "~1.0.4", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "~1.7.2", + "mime": "1.6.0", + "ms": "2.1.1", + "on-finished": "~2.3.0", + "range-parser": "~1.2.1", + "statuses": "~1.5.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "optional": true, + "requires": { + "ms": "2.0.0" + }, + "dependencies": { + "ms": { + "version": "2.0.0", + "dev": true, + "optional": true + } + } + }, + "ms": { + "version": "2.1.1", + "dev": true, + "optional": true + } + } + }, + "serve-static": { + "version": "1.14.1", + "dev": true, + "optional": true, + "requires": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.17.1" + } + }, + "servify": { + "version": "0.1.12", + "dev": true, + "optional": true, + "requires": { + "body-parser": "^1.16.0", + "cors": "^2.8.1", + "express": "^4.14.0", + "request": "^2.79.0", + "xhr": "^2.3.3" + } + }, + "set-immediate-shim": { + "version": "1.0.1", + "dev": true + }, + "set-value": { + 
"version": "2.0.1", + "dev": true, + "requires": { + "extend-shallow": "^2.0.1", + "is-extendable": "^0.1.1", + "is-plain-object": "^2.0.3", + "split-string": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "is-extendable": { + "version": "0.1.1", + "dev": true + } + } + }, + "setimmediate": { + "version": "1.0.5", + "dev": true + }, + "setprototypeof": { + "version": "1.1.1", + "dev": true, + "optional": true + }, + "sha.js": { + "version": "2.4.11", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "simple-concat": { + "version": "1.0.1", + "dev": true, + "optional": true + }, + "simple-get": { + "version": "2.8.1", + "dev": true, + "optional": true, + "requires": { + "decompress-response": "^3.3.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "snapdragon": { + "version": "0.8.2", + "dev": true, + "requires": { + "base": "^0.11.1", + "debug": "^2.2.0", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "map-cache": "^0.2.2", + "source-map": "^0.5.6", + "source-map-resolve": "^0.5.0", + "use": "^3.1.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "define-property": { + "version": "0.2.5", + "dev": true, + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + } + }, + "is-extendable": { + "version": "0.1.1", + "dev": true + }, + "kind-of": { + "version": "5.1.0", + "dev": true + }, + "ms": { + "version": "2.0.0", + "dev": true + } + } + }, + "snapdragon-node": { + "version": "2.1.1", + "dev": true, + "requires": { + "define-property": "^1.0.0", + "isobject": "^3.0.0", + "snapdragon-util": "^3.0.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "dev": true, + "requires": { + "is-descriptor": "^1.0.0" + } + } + } + }, + "snapdragon-util": { + "version": "3.0.1", + "dev": true, + "requires": { + "kind-of": "^3.2.0" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "source-map": { + "version": "0.5.7", + "dev": true + }, + "source-map-resolve": { + "version": "0.5.3", + "dev": true, + "requires": { + "atob": "^2.1.2", + "decode-uri-component": "^0.2.0", + "resolve-url": "^0.2.1", + "source-map-url": "^0.4.0", + "urix": "^0.1.0" + } + }, + "source-map-support": { + "version": "0.5.12", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "dev": true + } + } + }, + "source-map-url": { + "version": 
"0.4.0", + "dev": true + }, + "split-string": { + "version": "3.1.0", + "dev": true, + "requires": { + "extend-shallow": "^3.0.0" + } + }, + "sshpk": { + "version": "1.16.1", + "dev": true, + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "dependencies": { + "tweetnacl": { + "version": "0.14.5", + "dev": true + } + } + }, + "static-extend": { + "version": "0.1.2", + "dev": true, + "requires": { + "define-property": "^0.2.5", + "object-copy": "^0.1.0" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "dev": true, + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "is-accessor-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "is-data-descriptor": { + "version": "0.1.4", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-descriptor": { + "version": "0.1.6", + "dev": true, + "requires": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + } + }, + "kind-of": { + "version": "5.1.0", + "dev": true + } + } + }, + "statuses": { + "version": "1.5.0", + "dev": true, + "optional": true + }, + "stream-to-pull-stream": { + "version": "1.7.3", + "dev": true, + "requires": { + "looper": "^3.0.0", + "pull-stream": "^3.2.3" + }, + "dependencies": { + "looper": { + "version": "3.0.0", + "dev": true + } + } + }, + "strict-uri-encode": { + "version": "1.1.0", + "dev": true, + "optional": true + }, + "string_decoder": { + "version": "1.1.1", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "dev": true + } + } + }, + "string.prototype.trim": { + "version": "1.2.3", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.1" + } + }, + "string.prototype.trimend": { + "version": "1.0.3", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + } + }, + "string.prototype.trimstart": { + "version": "1.0.3", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + } + }, + "strip-hex-prefix": { + "version": "1.0.0", + "dev": true, + "requires": { + "is-hex-prefixed": "1.0.0" + } + }, + "supports-color": { + "version": "5.5.0", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "swarm-js": { + "version": "0.1.40", + "dev": true, + "optional": true, + "requires": { + "bluebird": "^3.5.0", + "buffer": "^5.0.5", + "eth-lib": "^0.1.26", + "fs-extra": "^4.0.2", + "got": "^7.1.0", + "mime-types": "^2.1.16", + "mkdirp-promise": "^5.0.1", + "mock-fs": "^4.1.0", + "setimmediate": "^1.0.5", + "tar": "^4.0.2", + "xhr-request": "^1.0.1" + }, + "dependencies": { + "fs-extra": { + "version": "4.0.3", + "dev": true, + "optional": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "get-stream": { + "version": "3.0.0", + "dev": true, + "optional": true + }, + "got": { + "version": "7.1.0", + "dev": true, + "optional": true, + 
"requires": { + "decompress-response": "^3.2.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "is-plain-obj": "^1.1.0", + "is-retry-allowed": "^1.0.0", + "is-stream": "^1.0.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "p-cancelable": "^0.3.0", + "p-timeout": "^1.1.1", + "safe-buffer": "^5.0.1", + "timed-out": "^4.0.0", + "url-parse-lax": "^1.0.0", + "url-to-options": "^1.0.1" + } + }, + "is-stream": { + "version": "1.1.0", + "dev": true, + "optional": true + }, + "p-cancelable": { + "version": "0.3.0", + "dev": true, + "optional": true + }, + "prepend-http": { + "version": "1.0.4", + "dev": true, + "optional": true + }, + "url-parse-lax": { + "version": "1.0.0", + "dev": true, + "optional": true, + "requires": { + "prepend-http": "^1.0.1" + } + } + } + }, + "tape": { + "version": "4.13.3", + "dev": true, + "requires": { + "deep-equal": "~1.1.1", + "defined": "~1.0.0", + "dotignore": "~0.1.2", + "for-each": "~0.3.3", + "function-bind": "~1.1.1", + "glob": "~7.1.6", + "has": "~1.0.3", + "inherits": "~2.0.4", + "is-regex": "~1.0.5", + "minimist": "~1.2.5", + "object-inspect": "~1.7.0", + "resolve": "~1.17.0", + "resumer": "~0.0.0", + "string.prototype.trim": "~1.2.1", + "through": "~2.3.8" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "is-regex": { + "version": "1.0.5", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "object-inspect": { + "version": "1.7.0", + "dev": true + }, + "resolve": { + "version": "1.17.0", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + } + } + }, + "tar": { + "version": "4.4.13", + "dev": true, + "optional": true, + "requires": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.8.6", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + "safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + }, + "dependencies": { + "fs-minipass": { + "version": "1.2.7", + "dev": true, + "optional": true, + "requires": { + "minipass": "^2.6.0" + } + }, + "minipass": { + "version": "2.9.0", + "dev": true, + "optional": true, + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + } + } + }, + "through": { + "version": "2.3.8", + "dev": true + }, + "through2": { + "version": "2.0.5", + "dev": true, + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "timed-out": { + "version": "4.0.1", + "dev": true, + "optional": true + }, + "tmp": { + "version": "0.1.0", + "dev": true, + "requires": { + "rimraf": "^2.6.3" + } + }, + "to-object-path": { + "version": "0.3.0", + "dev": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "dev": true + }, + "kind-of": { + "version": "3.2.2", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "to-readable-stream": { + "version": "1.0.0", + "dev": true, + "optional": true + }, + "to-regex": { + "version": "3.0.2", + "dev": true, + "requires": { + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "regex-not": "^1.0.2", + "safe-regex": "^1.1.0" + } + }, + "toidentifier": { + "version": "1.0.0", + "dev": true, + "optional": true + }, + "tough-cookie": { + "version": "2.5.0", + "dev": true, + "requires": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + } + }, + "trim-right": { + "version": "1.0.1", + "dev": true + }, + "tunnel-agent": { + "version": "0.6.0", + "dev": true, + 
"requires": { + "safe-buffer": "^5.0.1" + } + }, + "tweetnacl": { + "version": "1.0.3", + "dev": true + }, + "tweetnacl-util": { + "version": "0.15.1", + "dev": true + }, + "type": { + "version": "1.2.0", + "dev": true + }, + "type-is": { + "version": "1.6.18", + "dev": true, + "optional": true, + "requires": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + } + }, + "typedarray": { + "version": "0.0.6", + "dev": true + }, + "typedarray-to-buffer": { + "version": "3.1.5", + "dev": true, + "requires": { + "is-typedarray": "^1.0.0" + } + }, + "typewise": { + "version": "1.0.3", + "dev": true, + "requires": { + "typewise-core": "^1.2.0" + } + }, + "typewise-core": { + "version": "1.2.0", + "dev": true + }, + "typewiselite": { + "version": "1.0.0", + "dev": true + }, + "ultron": { + "version": "1.1.1", + "dev": true, + "optional": true + }, + "underscore": { + "version": "1.9.1", + "dev": true, + "optional": true + }, + "union-value": { + "version": "1.0.1", + "dev": true, + "requires": { + "arr-union": "^3.1.0", + "get-value": "^2.0.6", + "is-extendable": "^0.1.1", + "set-value": "^2.0.1" + }, + "dependencies": { + "is-extendable": { + "version": "0.1.1", + "dev": true + } + } + }, + "universalify": { + "version": "0.1.2", + "dev": true + }, + "unorm": { + "version": "1.6.0", + "dev": true + }, + "unpipe": { + "version": "1.0.0", + "dev": true, + "optional": true + }, + "unset-value": { + "version": "1.0.0", + "dev": true, + "requires": { + "has-value": "^0.3.1", + "isobject": "^3.0.0" + }, + "dependencies": { + "has-value": { + "version": "0.3.1", + "dev": true, + "requires": { + "get-value": "^2.0.3", + "has-values": "^0.1.4", + "isobject": "^2.0.0" + }, + "dependencies": { + "isobject": { + "version": "2.1.0", + "dev": true, + "requires": { + "isarray": "1.0.0" + } + } + } + }, + "has-values": { + "version": "0.1.4", + "dev": true + } + } + }, + "uri-js": { + "version": "4.4.1", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "urix": { + "version": "0.1.0", + "dev": true + }, + "url-parse-lax": { + "version": "3.0.0", + "dev": true, + "optional": true, + "requires": { + "prepend-http": "^2.0.0" + } + }, + "url-set-query": { + "version": "1.0.0", + "dev": true, + "optional": true + }, + "url-to-options": { + "version": "1.0.1", + "dev": true, + "optional": true + }, + "use": { + "version": "3.1.1", + "dev": true + }, + "utf-8-validate": { + "version": "5.0.4", + "dev": true, + "requires": { + "node-gyp-build": "^4.2.0" + } + }, + "utf8": { + "version": "3.0.0", + "dev": true, + "optional": true + }, + "util-deprecate": { + "version": "1.0.2", + "dev": true + }, + "util.promisify": { + "version": "1.1.1", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "for-each": "^0.3.3", + "has-symbols": "^1.0.1", + "object.getownpropertydescriptors": "^2.1.1" + } + }, + "utils-merge": { + "version": "1.0.1", + "dev": true, + "optional": true + }, + "uuid": { + "version": "3.4.0", + "dev": true + }, + "varint": { + "version": "5.0.2", + "dev": true, + "optional": true + }, + "vary": { + "version": "1.1.2", + "dev": true, + "optional": true + }, + "verror": { + "version": "1.10.0", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "web3": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "web3-bzz": "1.2.11", + "web3-core": "1.2.11", + "web3-eth": "1.2.11", + "web3-eth-personal": "1.2.11", + "web3-net": "1.2.11", + "web3-shh": "1.2.11", + 
"web3-utils": "1.2.11" + } + }, + "web3-bzz": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "@types/node": "^12.12.6", + "got": "9.6.0", + "swarm-js": "^0.1.40", + "underscore": "1.9.1" + }, + "dependencies": { + "@types/node": { + "version": "12.19.12", + "dev": true, + "optional": true + } + } + }, + "web3-core": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "@types/bn.js": "^4.11.5", + "@types/node": "^12.12.6", + "bignumber.js": "^9.0.0", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-core-requestmanager": "1.2.11", + "web3-utils": "1.2.11" + }, + "dependencies": { + "@types/node": { + "version": "12.19.12", + "dev": true, + "optional": true + } + } + }, + "web3-core-helpers": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "underscore": "1.9.1", + "web3-eth-iban": "1.2.11", + "web3-utils": "1.2.11" + } + }, + "web3-core-method": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/transactions": "^5.0.0-beta.135", + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11", + "web3-core-promievent": "1.2.11", + "web3-core-subscriptions": "1.2.11", + "web3-utils": "1.2.11" + } + }, + "web3-core-promievent": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "eventemitter3": "4.0.4" + } + }, + "web3-core-requestmanager": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11", + "web3-providers-http": "1.2.11", + "web3-providers-ipc": "1.2.11", + "web3-providers-ws": "1.2.11" + } + }, + "web3-core-subscriptions": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "eventemitter3": "4.0.4", + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11" + } + }, + "web3-eth": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "underscore": "1.9.1", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-core-subscriptions": "1.2.11", + "web3-eth-abi": "1.2.11", + "web3-eth-accounts": "1.2.11", + "web3-eth-contract": "1.2.11", + "web3-eth-ens": "1.2.11", + "web3-eth-iban": "1.2.11", + "web3-eth-personal": "1.2.11", + "web3-net": "1.2.11", + "web3-utils": "1.2.11" + } + }, + "web3-eth-abi": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "@ethersproject/abi": "5.0.0-beta.153", + "underscore": "1.9.1", + "web3-utils": "1.2.11" + } + }, + "web3-eth-accounts": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "crypto-browserify": "3.12.0", + "eth-lib": "0.2.8", + "ethereumjs-common": "^1.3.2", + "ethereumjs-tx": "^2.1.1", + "scrypt-js": "^3.0.1", + "underscore": "1.9.1", + "uuid": "3.3.2", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-utils": "1.2.11" + }, + "dependencies": { + "eth-lib": { + "version": "0.2.8", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.11.6", + "elliptic": "^6.4.0", + "xhr-request-promise": "^0.1.2" + } + }, + "uuid": { + "version": "3.3.2", + "dev": true, + "optional": true + } + } + }, + "web3-eth-contract": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "@types/bn.js": "^4.11.5", + "underscore": "1.9.1", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-core-promievent": "1.2.11", + "web3-core-subscriptions": "1.2.11", 
+ "web3-eth-abi": "1.2.11", + "web3-utils": "1.2.11" + } + }, + "web3-eth-ens": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "content-hash": "^2.5.2", + "eth-ens-namehash": "2.0.8", + "underscore": "1.9.1", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-promievent": "1.2.11", + "web3-eth-abi": "1.2.11", + "web3-eth-contract": "1.2.11", + "web3-utils": "1.2.11" + } + }, + "web3-eth-iban": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.11.9", + "web3-utils": "1.2.11" + } + }, + "web3-eth-personal": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "@types/node": "^12.12.6", + "web3-core": "1.2.11", + "web3-core-helpers": "1.2.11", + "web3-core-method": "1.2.11", + "web3-net": "1.2.11", + "web3-utils": "1.2.11" + }, + "dependencies": { + "@types/node": { + "version": "12.19.12", + "dev": true, + "optional": true + } + } + }, + "web3-net": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "web3-core": "1.2.11", + "web3-core-method": "1.2.11", + "web3-utils": "1.2.11" + } + }, + "web3-provider-engine": { + "version": "14.2.1", + "dev": true, + "requires": { + "async": "^2.5.0", + "backoff": "^2.5.0", + "clone": "^2.0.0", + "cross-fetch": "^2.1.0", + "eth-block-tracker": "^3.0.0", + "eth-json-rpc-infura": "^3.1.0", + "eth-sig-util": "3.0.0", + "ethereumjs-block": "^1.2.2", + "ethereumjs-tx": "^1.2.0", + "ethereumjs-util": "^5.1.5", + "ethereumjs-vm": "^2.3.4", + "json-rpc-error": "^2.0.0", + "json-stable-stringify": "^1.0.1", + "promise-to-callback": "^1.0.0", + "readable-stream": "^2.2.9", + "request": "^2.85.0", + "semaphore": "^1.0.3", + "ws": "^5.1.1", + "xhr": "^2.2.0", + "xtend": "^4.0.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.6.3", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + }, + "deferred-leveldown": { + "version": "1.2.2", + "dev": true, + "requires": { + "abstract-leveldown": "~2.6.0" + } + }, + "eth-sig-util": { + "version": "1.4.2", + "dev": true, + "requires": { + "ethereumjs-abi": "git+https://github.com/ethereumjs/ethereumjs-abi.git", + "ethereumjs-util": "^5.1.1" + } + }, + "ethereumjs-account": { + "version": "2.0.5", + "dev": true, + "requires": { + "ethereumjs-util": "^5.0.0", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-block": { + "version": "1.7.1", + "dev": true, + "requires": { + "async": "^2.0.1", + "ethereum-common": "0.2.0", + "ethereumjs-tx": "^1.2.2", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "ethereum-common": { + "version": "0.2.0", + "dev": true + } + } + }, + "ethereumjs-tx": { + "version": "1.3.7", + "dev": true, + "requires": { + "ethereum-common": "^0.0.18", + "ethereumjs-util": "^5.0.0" + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "ethereumjs-vm": { + "version": "2.6.0", + "dev": true, + "requires": { + "async": "^2.1.2", + "async-eventemitter": "^0.2.2", + "ethereumjs-account": "^2.0.3", + "ethereumjs-block": "~2.2.0", + "ethereumjs-common": "^1.1.0", + "ethereumjs-util": "^6.0.0", + "fake-merkle-patricia-tree": "^1.0.1", + "functional-red-black-tree": "^1.0.1", + "merkle-patricia-tree": "^2.3.2", + "rustbn.js": "~0.2.0", + "safe-buffer": "^5.1.1" + }, + 
"dependencies": { + "ethereumjs-block": { + "version": "2.2.2", + "dev": true, + "requires": { + "async": "^2.0.1", + "ethereumjs-common": "^1.5.0", + "ethereumjs-tx": "^2.1.1", + "ethereumjs-util": "^5.0.0", + "merkle-patricia-tree": "^2.1.2" + }, + "dependencies": { + "ethereumjs-util": { + "version": "5.2.1", + "dev": true, + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + } + } + }, + "ethereumjs-tx": { + "version": "2.1.2", + "dev": true, + "requires": { + "ethereumjs-common": "^1.5.0", + "ethereumjs-util": "^6.0.0" + } + }, + "ethereumjs-util": { + "version": "6.2.1", + "dev": true, + "requires": { + "@types/bn.js": "^4.11.3", + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.3" + } + } + } + }, + "isarray": { + "version": "0.0.1", + "dev": true + }, + "level-codec": { + "version": "7.0.1", + "dev": true + }, + "level-errors": { + "version": "1.0.5", + "dev": true, + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "1.3.1", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + } + } + }, + "level-ws": { + "version": "0.0.0", + "dev": true, + "requires": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + }, + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "xtend": { + "version": "2.1.2", + "dev": true, + "requires": { + "object-keys": "~0.4.0" + } + } + } + }, + "levelup": { + "version": "1.3.9", + "dev": true, + "requires": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "ltgt": { + "version": "2.2.1", + "dev": true + }, + "memdown": { + "version": "1.4.1", + "dev": true, + "requires": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.7.2", + "dev": true, + "requires": { + "xtend": "~4.0.0" + } + } + } + }, + "merkle-patricia-tree": { + "version": "2.3.2", + "dev": true, + "requires": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + }, + "dependencies": { + "async": { + "version": "1.5.2", + "dev": true + } + } + }, + "object-keys": { + "version": "0.4.0", + "dev": true + }, + "safe-buffer": { + "version": "5.1.2", + "dev": true + }, + "semver": { + "version": "5.4.1", + "dev": true + }, + "string_decoder": { + "version": "0.10.31", + "dev": true + }, + "ws": { + "version": "5.2.2", + "dev": true, + "requires": { + "async-limiter": "~1.0.0" + } + } + } + }, + "web3-providers-http": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + 
"web3-core-helpers": "1.2.11", + "xhr2-cookies": "1.1.0" + } + }, + "web3-providers-ipc": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "oboe": "2.1.4", + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11" + } + }, + "web3-providers-ws": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "eventemitter3": "4.0.4", + "underscore": "1.9.1", + "web3-core-helpers": "1.2.11", + "websocket": "^1.0.31" + } + }, + "web3-shh": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "web3-core": "1.2.11", + "web3-core-method": "1.2.11", + "web3-core-subscriptions": "1.2.11", + "web3-net": "1.2.11" + } + }, + "web3-utils": { + "version": "1.2.11", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.11.9", + "eth-lib": "0.2.8", + "ethereum-bloom-filters": "^1.0.6", + "ethjs-unit": "0.1.6", + "number-to-bn": "1.7.0", + "randombytes": "^2.1.0", + "underscore": "1.9.1", + "utf8": "3.0.0" + }, + "dependencies": { + "eth-lib": { + "version": "0.2.8", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.11.6", + "elliptic": "^6.4.0", + "xhr-request-promise": "^0.1.2" + } + } + } + }, + "websocket": { + "version": "1.0.32", + "dev": true, + "requires": { + "bufferutil": "^4.0.1", + "debug": "^2.2.0", + "es5-ext": "^0.10.50", + "typedarray-to-buffer": "^3.1.5", + "utf-8-validate": "^5.0.2", + "yaeti": "^0.0.6" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "dev": true + } + } + }, + "whatwg-fetch": { + "version": "2.0.4", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "dev": true + }, + "ws": { + "version": "3.3.3", + "dev": true, + "optional": true, + "requires": { + "async-limiter": "~1.0.0", + "safe-buffer": "~5.1.0", + "ultron": "~1.1.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "dev": true, + "optional": true + } + } + }, + "xhr": { + "version": "2.6.0", + "dev": true, + "requires": { + "global": "~4.4.0", + "is-function": "^1.0.1", + "parse-headers": "^2.0.0", + "xtend": "^4.0.0" + } + }, + "xhr-request": { + "version": "1.1.0", + "dev": true, + "optional": true, + "requires": { + "buffer-to-arraybuffer": "^0.0.5", + "object-assign": "^4.1.1", + "query-string": "^5.0.1", + "simple-get": "^2.7.0", + "timed-out": "^4.0.1", + "url-set-query": "^1.0.0", + "xhr": "^2.0.4" + } + }, + "xhr-request-promise": { + "version": "0.1.3", + "dev": true, + "optional": true, + "requires": { + "xhr-request": "^1.1.0" + } + }, + "xhr2-cookies": { + "version": "1.1.0", + "dev": true, + "optional": true, + "requires": { + "cookiejar": "^2.1.1" + } + }, + "xtend": { + "version": "4.0.2", + "dev": true + }, + "yaeti": { + "version": "0.0.6", + "dev": true + }, + "yallist": { + "version": "3.1.1", + "dev": true + } + } + }, + "gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "peer": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": 
"sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true + }, + "get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } + }, + "get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + } + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "requires": { + "assert-plus": "^1.0.0" + } + }, + "glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "global": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", + "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", + "requires": { + "min-document": "^2.19.0", + "process": "^0.11.10" + } + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" + }, + "graceful-fs": { + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true + }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, + "har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + }, + "har-validator": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "requires": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + } + }, + "hardhat": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/hardhat/-/hardhat-2.8.2.tgz", + "integrity": "sha512-cBUqzZGOi+lwKHArWl5Be7zeFIwlu1IUXOna6k5XhORZ8hAWDVbAJBVfxgmjkcX5GffIf0C5g841zRxo36sQ5g==", + "dev": true, + "requires": { + "@ethereumjs/block": "^3.6.0", + "@ethereumjs/blockchain": "^5.5.0", + "@ethereumjs/common": "^2.6.0", + 
"@ethereumjs/tx": "^3.4.0", + "@ethereumjs/vm": "^5.6.0", + "@ethersproject/abi": "^5.1.2", + "@sentry/node": "^5.18.1", + "@solidity-parser/parser": "^0.14.0", + "@types/bn.js": "^5.1.0", + "@types/lru-cache": "^5.1.0", + "abort-controller": "^3.0.0", + "adm-zip": "^0.4.16", + "ansi-escapes": "^4.3.0", + "chalk": "^2.4.2", + "chokidar": "^3.4.0", + "ci-info": "^2.0.0", + "debug": "^4.1.1", + "enquirer": "^2.3.0", + "env-paths": "^2.2.0", + "eth-sig-util": "^2.5.2", + "ethereum-cryptography": "^0.1.2", + "ethereumjs-abi": "^0.6.8", + "ethereumjs-util": "^7.1.3", + "find-up": "^2.1.0", + "fp-ts": "1.19.3", + "fs-extra": "^7.0.1", + "glob": "^7.1.3", + "https-proxy-agent": "^5.0.0", + "immutable": "^4.0.0-rc.12", + "io-ts": "1.10.4", + "lodash": "^4.17.11", + "merkle-patricia-tree": "^4.2.2", + "mnemonist": "^0.38.0", + "mocha": "^7.2.0", + "node-fetch": "^2.6.0", + "qs": "^6.7.0", + "raw-body": "^2.4.1", + "resolve": "1.17.0", + "semver": "^6.3.0", + "slash": "^3.0.0", + "solc": "0.7.3", + "source-map-support": "^0.5.13", + "stacktrace-parser": "^0.1.10", + "true-case-path": "^2.2.1", + "tsort": "0.0.1", + "uuid": "^8.3.2", + "ws": "^7.4.6" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + }, + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + } + }, + "jsonfile": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6" + } + }, + "level-ws": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/level-ws/-/level-ws-2.0.0.tgz", + "integrity": "sha512-1iv7VXx0G9ec1isqQZ7y5LmoZo/ewAsyDHNA8EFDW5hqH2Kqovm33nSFkSdnLLAK+I5FlT+lo5Cw9itGe+CpQA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "readable-stream": "^3.1.0", + "xtend": "^4.0.1" + } + }, + "merkle-patricia-tree": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/merkle-patricia-tree/-/merkle-patricia-tree-4.2.2.tgz", + "integrity": "sha512-eqZYNTshcYx9aESkSPr71EqwsR/QmpnObDEV4iLxkt/x/IoLYZYjJvKY72voP/27Vy61iMOrfOG6jrn7ttXD+Q==", + "dev": true, + "requires": { + "@types/levelup": "^4.3.0", + "ethereumjs-util": "^7.1.2", + "level-mem": "^5.0.1", + "level-ws": "^2.0.0", + "readable-stream": "^3.6.0", + "rlp": "^2.2.4", + "semaphore-async-await": "^1.5.1" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "requires": { + "path-parse": 
"^1.0.6" + } + }, + "solc": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/solc/-/solc-0.7.3.tgz", + "integrity": "sha512-GAsWNAjGzIDg7VxzP6mPjdurby3IkGCjQcM8GFYZT6RyaoUZKmMU6Y7YwG+tFGhv7dwZ8rmR4iwFDrrD99JwqA==", + "dev": true, + "requires": { + "command-exists": "^1.2.8", + "commander": "3.0.2", + "follow-redirects": "^1.12.1", + "fs-extra": "^0.30.0", + "js-sha3": "0.8.0", + "memorystream": "^0.3.1", + "require-from-string": "^2.0.0", + "semver": "^5.5.0", + "tmp": "0.0.33" + }, + "dependencies": { + "fs-extra": { + "version": "0.30.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz", + "integrity": "sha1-8jP/zAjU2n1DLapEl3aYnbHfk/A=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^2.1.0", + "klaw": "^1.0.0", + "path-is-absolute": "^1.0.0", + "rimraf": "^2.2.8" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "ws": { + "version": "7.5.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.6.tgz", + "integrity": "sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA==", + "dev": true, + "requires": {} + } + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-bigints": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz", + "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + }, + "has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true + }, + "has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.2" + } + }, + "hash-base": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", + "requires": { + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + } + 
} + }, + "hash.js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "requires": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "hmac-drbg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=", + "requires": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "http-errors": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", + "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", + "dev": true, + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.1" + } + }, + "http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } + }, + "https-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "dev": true, + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "idna-uts46-hx": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/idna-uts46-hx/-/idna-uts46-hx-2.3.1.tgz", + "integrity": "sha512-PWoF9Keq6laYdIRwwCdhTPl60xRqAloYNMQLiyUnG42VjT53oW07BXIRM+NK7eQjzXjAk2gUvX9caRxlnF9TAA==", + "dev": true, + "requires": { + "punycode": "2.1.0" + } + }, + "ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true + }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + }, + "immediate": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.3.0.tgz", + "integrity": "sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q==" + }, + "immutable": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-4.0.0.tgz", + 
"integrity": "sha512-zIE9hX70qew5qTUjSS7wi1iwj/l7+m54KWU247nhM3v806UdGj1yDndXj+IOYxxtW9zyLI+xqFNZjTuDaLUqFw==", + "dev": true + }, + "import-fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", + "integrity": "sha1-2BNVwVYS04bGH53dOSLUMEgipUY=", + "dev": true, + "requires": { + "caller-path": "^2.0.0", + "resolve-from": "^3.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true + }, + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "internal-slot": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", + "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", + "dev": true, + "requires": { + "get-intrinsic": "^1.1.0", + "has": "^1.0.3", + "side-channel": "^1.0.4" + } + }, + "invert-kv": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", + "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=", + "dev": true + }, + "io-ts": { + "version": "1.10.4", + "resolved": "https://registry.npmjs.org/io-ts/-/io-ts-1.10.4.tgz", + "integrity": "sha512-b23PteSnYXSONJ6JQXRAlvJhuw8KOtkqa87W4wDtvMrud/DTJd5X+NpOOI+O/zZwVq6v0VLAaJ+1EDViKEuN9g==", + "dev": true, + "requires": { + "fp-ts": "^1.0.0" + } + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "is-bigint": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dev": true, + "requires": { + "has-bigints": "^1.0.1" + } + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-buffer": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", + "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", + "dev": true + }, + "is-callable": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", + "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", + "dev": true + }, + "is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "dev": true, + "requires": { + "ci-info": "^2.0.0" + } + }, + "is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "requires": { + "has": "^1.0.3" + } + }, + "is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dev": true, + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-directory": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/is-directory/-/is-directory-0.3.1.tgz", + "integrity": "sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=", + "dev": true + }, + "is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fn/-/is-fn-1.0.0.tgz", + "integrity": "sha1-lUPV3nvPWwiiLsiiC65uKG1RDYw=" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "is-function": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.2.tgz", + "integrity": "sha512-lw7DUp0aWXYg+CBCN+JKkcE0Q2RayZnSvnZBlwgxHBQhqt5pZNVy4Ri7H9GmmXkdu7LUthszM+Tor1u/2iBcpQ==" + }, + 
"is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-hex-prefixed": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-hex-prefixed/-/is-hex-prefixed-1.0.0.tgz", + "integrity": "sha1-fY035q135dEnFIkTxXPggtd39VQ=" + }, + "is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "dev": true + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-number-object": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz", + "integrity": "sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g==", + "dev": true, + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-shared-array-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz", + "integrity": "sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==", + "dev": true + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" + }, + "is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dev": true, + "requires": { + "has-symbols": "^1.0.2" + } + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + }, + "is-url": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/is-url/-/is-url-1.2.4.tgz", + "integrity": "sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==", + "dev": true + }, + "is-utf8": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", + "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", + "dev": true + }, + "is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, + "requires": { + 
"call-bind": "^1.0.2" + } + }, + "is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "requires": { + "is-docker": "^2.0.0" + } + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + }, + "js-sha3": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", + "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==", + "dev": true + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==" + }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "json-rpc-engine": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/json-rpc-engine/-/json-rpc-engine-5.4.0.tgz", + "integrity": "sha512-rAffKbPoNDjuRnXkecTjnsE3xLLrb00rEkdgalINhaYVYIxDwWtvYBr9UFbhTvPB1B2qUOLoFd/cV6f4Q7mh7g==", + "requires": { + "eth-rpc-errors": "^3.0.0", + "safe-event-emitter": "^1.0.1" + } + }, + "json-rpc-random-id": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-rpc-random-id/-/json-rpc-random-id-1.0.1.tgz", + "integrity": "sha1-uknZat7RRE27jaPSA3SKy7zeyMg=" + }, + "json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "json-stable-stringify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz", + "integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=", + "requires": { + "jsonify": 
"~0.0.0" + } + }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + }, + "json5": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", + "peer": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6" + } + }, + "jsonify": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", + "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=" + }, + "jsprim": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, + "keccak": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/keccak/-/keccak-3.0.2.tgz", + "integrity": "sha512-PyKKjkH53wDMLGrvmRGSNWgmSxZOUqbnXwKL9tmgbFYA1iAYqW21kfR7mZXV0MlESiefxQQE9X9fTa3X+2MPDQ==", + "requires": { + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0", + "readable-stream": "^3.6.0" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "klaw": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", + "integrity": "sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.9" + } + }, + "klaw-sync": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/klaw-sync/-/klaw-sync-6.0.0.tgz", + "integrity": "sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.11" + } + }, + "lcid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", + "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "dev": true, + "requires": { + "invert-kv": "^1.0.0" + } + }, + "level-codec": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-9.0.2.tgz", + "integrity": "sha512-UyIwNb1lJBChJnGfjmO0OR+ezh2iVu1Kas3nvBS/BzGnx79dv6g7unpKIDNPMhfdTEGoc7mC8uAu51XEtX+FHQ==", + "dev": true, + "requires": { + "buffer": "^5.6.0" + } + }, + "level-concat-iterator": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", + "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==", + "dev": true + }, + "level-errors": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", + "integrity": "sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", + "dev": true, + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz", + "integrity": "sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q==", + "dev": true, + "requires": { + "inherits": "^2.0.4", + "readable-stream": "^3.4.0", + "xtend": "^4.0.2" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "level-mem": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/level-mem/-/level-mem-5.0.1.tgz", + "integrity": "sha512-qd+qUJHXsGSFoHTziptAKXoLX87QjR7v2KMbqncDXPxQuCdsQlzmyX+gwrEHhlzn08vkf8TyipYyMmiC6Gobzg==", + "dev": true, + "requires": { + "level-packager": "^5.0.3", + "memdown": "^5.0.0" + } + }, + "level-packager": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-5.1.1.tgz", + "integrity": "sha512-HMwMaQPlTC1IlcwT3+swhqf/NUO+ZhXVz6TY1zZIIZlIR0YSn8GtAAWmIvKjNY16ZkEg/JcpAuQskxsXqC0yOQ==", + "dev": true, + "requires": { + "encoding-down": "^6.3.0", + "levelup": "^4.3.2" + } + }, + "level-supports": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", + "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", + "dev": true, + "requires": { + "xtend": "^4.0.2" + } + }, + "level-ws": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/level-ws/-/level-ws-0.0.0.tgz", + "integrity": "sha1-Ny5RIXeSSgBCSwtDrvK7QkltIos=", + "requires": { + "readable-stream": "~1.0.15", + "xtend": "~2.1.1" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "object-keys": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz", + "integrity": "sha1-KKaq50KN0sOpLz2V8hM13SBOAzY=" + }, + "readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "xtend": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-2.1.2.tgz", + "integrity": "sha1-bv7MKk2tjmlixJAbM3znuoe10os=", + "requires": { + "object-keys": "~0.4.0" + } + } + } + }, + "levelup": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/levelup/-/levelup-4.4.0.tgz", + "integrity": "sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ==", + 
"dev": true, + "requires": { + "deferred-leveldown": "~5.3.0", + "level-errors": "~2.0.0", + "level-iterator-stream": "~4.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + } + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "load-json-file": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0", + "strip-bom": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "lodash.assign": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz", + "integrity": "sha1-DZnzzNem0mHRm9rrkkUAXShYCOc=", + "dev": true + }, + "lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=" + }, + "lodash.flatmap": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.flatmap/-/lodash.flatmap-4.5.0.tgz", + "integrity": "sha1-74y/QI9uSCaGYzRTBcaswLd4cC4=" + }, + "log-symbols": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-3.0.0.tgz", + "integrity": "sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==", + "dev": true, + "requires": { + "chalk": "^2.4.2" + } + }, + "lru_map": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/lru_map/-/lru_map-0.3.3.tgz", + "integrity": "sha1-tcg1G5Rky9dQM1p5ZQoOwOVhGN0=", + "dev": true + }, + "lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "requires": { + "yallist": "^3.0.2" + } + }, + "ltgt": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ltgt/-/ltgt-2.2.1.tgz", + "integrity": "sha1-81ypHEk/e3PaDgdJUwTxezH4fuU=" + }, + "mcl-wasm": { + "version": "0.7.9", + "resolved": "https://registry.npmjs.org/mcl-wasm/-/mcl-wasm-0.7.9.tgz", + "integrity": "sha512-iJIUcQWA88IJB/5L15GnJVnSQJmf/YaxxV6zRavv83HILHaJQb6y0iFyDMdDO0gN8X37tdxmAOrH/P8B6RB8sQ==", + "dev": true + }, + "md5.js": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "memdown": { + "version": 
"5.1.0", + "resolved": "https://registry.npmjs.org/memdown/-/memdown-5.1.0.tgz", + "integrity": "sha512-B3J+UizMRAlEArDjWHTMmadet+UKwHd3UjMgGBkZcKAxAYVPS9o0Yeiha4qvz7iGiL2Sb3igUft6p7nbFWctpw==", + "dev": true, + "requires": { + "abstract-leveldown": "~6.2.1", + "functional-red-black-tree": "~1.0.1", + "immediate": "~3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.2.0" + }, + "dependencies": { + "abstract-leveldown": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", + "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", + "dev": true, + "requires": { + "buffer": "^5.5.0", + "immediate": "^3.2.3", + "level-concat-iterator": "~2.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + } + }, + "immediate": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.2.3.tgz", + "integrity": "sha1-0UD6j2FGWb1lQSMwl92qwlzdmRw=", + "dev": true + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + } + } + }, + "memorystream": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", + "integrity": "sha1-htcJCzDORV1j+64S3aUaR93K+bI=", + "dev": true + }, + "merkle-patricia-tree": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/merkle-patricia-tree/-/merkle-patricia-tree-2.3.2.tgz", + "integrity": "sha512-81PW5m8oz/pz3GvsAwbauj7Y00rqm81Tzad77tHBwU7pIAtN+TJnMSOJhxBKflSVYhptMMb9RskhqHqrSm1V+g==", + "requires": { + "async": "^1.4.2", + "ethereumjs-util": "^5.0.0", + "level-ws": "0.0.0", + "levelup": "^1.2.1", + "memdown": "^1.0.0", + "readable-stream": "^2.0.0", + "rlp": "^2.0.0", + "semaphore": ">=1.0.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-2.6.3.tgz", + "integrity": "sha512-2++wDf/DYqkPR3o5tbfdhF96EfMApo1GpPfzOsR/ZYXdkSmELlvOOEAl9iKkRsktMPHdGjO4rtkBpf2I7TiTeA==", + "requires": { + "xtend": "~4.0.0" + } + }, + "async": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=" + }, + "deferred-leveldown": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-1.2.2.tgz", + "integrity": "sha512-uukrWD2bguRtXilKt6cAWKyoXrTSMo5m7crUdLfWQmu8kIm88w3QZoUL+6nhpfKVmhHANER6Re3sKoNoZ3IKMA==", + "requires": { + "abstract-leveldown": "~2.6.0" + } + }, + "ethereumjs-util": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz", + "integrity": "sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ==", + "requires": { + "bn.js": "^4.11.0", + "create-hash": "^1.1.2", + "elliptic": "^6.5.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "^0.1.3", + "rlp": "^2.0.0", + "safe-buffer": "^5.1.1" + } + }, + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "level-codec": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-7.0.1.tgz", + "integrity": 
"sha512-Ua/R9B9r3RasXdRmOtd+t9TCOEIIlts+TN/7XTT2unhDaL6sJn83S3rUyljbr6lVtw49N3/yA0HHjpV6Kzb2aQ==" + }, + "level-errors": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-1.0.5.tgz", + "integrity": "sha512-/cLUpQduF6bNrWuAC4pwtUKA5t669pCsCi2XbmojG2tFeOr9j6ShtdDCtFFQO1DRt+EVZhx9gPzP9G2bUaG4ig==", + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-1.3.1.tgz", + "integrity": "sha1-5Dt4sagUPm+pek9IXrjqUwNS8u0=", + "requires": { + "inherits": "^2.0.1", + "level-errors": "^1.0.3", + "readable-stream": "^1.0.33", + "xtend": "^4.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + } + } + }, + "levelup": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/levelup/-/levelup-1.3.9.tgz", + "integrity": "sha512-VVGHfKIlmw8w1XqpGOAGwq6sZm2WwWLmlDcULkKWQXEA5EopA8OBNJ2Ck2v6bdk8HeEZSbCSEgzXadyQFm76sQ==", + "requires": { + "deferred-leveldown": "~1.2.1", + "level-codec": "~7.0.0", + "level-errors": "~1.0.3", + "level-iterator-stream": "~1.3.0", + "prr": "~1.0.1", + "semver": "~5.4.1", + "xtend": "~4.0.0" + } + }, + "memdown": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/memdown/-/memdown-1.4.1.tgz", + "integrity": "sha1-tOThkhdGZP+65BNhqlAPMRnv4hU=", + "requires": { + "abstract-leveldown": "~2.7.1", + "functional-red-black-tree": "^1.0.1", + "immediate": "^3.2.3", + "inherits": "~2.0.1", + "ltgt": "~2.2.0", + "safe-buffer": "~5.1.1" + }, + "dependencies": { + "abstract-leveldown": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-2.7.2.tgz", + "integrity": "sha512-+OVvxH2rHVEhWLdbudP6p0+dNMXu8JA1CbhP19T8paTYAcX7oJ4OVjT+ZUVpv7mITxXHqDMej+GdqXBmXkw09w==", + "requires": { + "xtend": "~4.0.0" + } + } + } + }, + "semver": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.4.1.tgz", + "integrity": "sha512-WfG/X9+oATh81XtllIo/I8gOiY9EXRdv1cQdyykeXK17YcUW3EXUAi2To4pcH6nZtJPr7ZOpM5OMyWJZm+8Rsg==" + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } + }, + "micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "requires": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + } + }, + "miller-rabin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", + "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", + "dev": true, + "requires": { + "bn.js": "^4.0.0", + "brorand": "^1.0.1" + } + }, + "mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==" + }, + "mime-types": { + "version": "2.1.34", + "resolved": 
"https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "requires": { + "mime-db": "1.51.0" + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, + "min-document": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", + "integrity": "sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=", + "requires": { + "dom-walk": "^0.1.0" + } + }, + "minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" + }, + "minimalistic-crypto-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=" + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "mnemonist": { + "version": "0.38.5", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.5.tgz", + "integrity": "sha512-bZTFT5rrPKtPJxj8KSV0WkPyNxl72vQepqqVUAW2ARUpUSF2qXMB6jZj7hW5/k7C1rtpzqbD/IIbJwLXUjCHeg==", + "dev": true, + "requires": { + "obliterator": "^2.0.0" + } + }, + "mocha": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-7.2.0.tgz", + "integrity": "sha512-O9CIypScywTVpNaRrCAgoUnJgozpIofjKUYmJhiCIJMiuYnLI6otcb1/kpW9/n/tJODHGZ7i8aLQoDVsMtOKQQ==", + "dev": true, + "requires": { + "ansi-colors": "3.2.3", + "browser-stdout": "1.3.1", + "chokidar": "3.3.0", + "debug": "3.2.6", + "diff": "3.5.0", + "escape-string-regexp": "1.0.5", + "find-up": "3.0.0", + "glob": "7.1.3", + "growl": "1.10.5", + "he": "1.2.0", + "js-yaml": "3.13.1", + "log-symbols": "3.0.0", + "minimatch": "3.0.4", + "mkdirp": "0.5.5", + "ms": "2.1.1", + "node-environment-flags": "1.0.6", + "object.assign": "4.1.0", + "strip-json-comments": "2.0.1", + "supports-color": "6.0.0", + "which": "1.3.1", + "wide-align": "1.1.3", + "yargs": "13.3.2", + "yargs-parser": "13.1.2", + "yargs-unparser": "1.6.0" + }, + "dependencies": { + "ansi-colors": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.3.tgz", + "integrity": "sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==", + "dev": true + }, + "chokidar": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.3.0.tgz", + "integrity": 
"sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.1.1", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.2.0" + } + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "fsevents": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", + "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", + "dev": true, + "optional": true + }, + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", + "dev": true + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "readdirp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.2.0.tgz", + "integrity": "sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==", + "dev": true, + "requires": { + "picomatch": "^2.0.4" + } + }, + "supports-color": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.0.0.tgz", + "integrity": "sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "ms": { + 
"version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, + "node-addon-api": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-2.0.2.tgz", + "integrity": "sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA==" + }, + "node-environment-flags": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.6.tgz", + "integrity": "sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==", + "dev": true, + "requires": { + "object.getownpropertydescriptors": "^2.0.3", + "semver": "^5.7.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==" + }, + "node-releases": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz", + "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==" + }, + "nofilter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/nofilter/-/nofilter-1.0.4.tgz", + "integrity": "sha512-N8lidFp+fCz+TD51+haYdbDGrcBWwuHX40F5+z0qkUjMJ5Tp+rdSuAkMJ9N9eoolDlEVTf6u5icM+cNKkKW2mA==", + "dev": true + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "normalize-path": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true + }, + "number-to-bn": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/number-to-bn/-/number-to-bn-1.7.0.tgz", + "integrity": "sha1-uzYjWS9+X54AMLGXe9QaDFP+HqA=", + "dev": true, + "requires": { + "bn.js": "4.11.6", + "strip-hex-prefix": "1.0.0" + }, + "dependencies": { + "bn.js": { + "version": "4.11.6", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.6.tgz", + "integrity": "sha1-UzRK2xRhehP26N0s4okF0cC6MhU=", + "dev": true + } + } + }, + "oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + }, + "object-inspect": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "object.getownpropertydescriptors": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.3.tgz", + "integrity": "sha512-VdDoCwvJI4QdC6ndjpqFmoL3/+HxffFBbcJzKi5hwLLqqx3mdbedRpfZDdK0SrOSauj8X4GzBvnDZl4vTN7dOw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + } + }, + "obliterator": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-2.0.1.tgz", + "integrity": "sha512-XnkiCrrBcIZQitJPAI36mrrpEUvatbte8hLcTcQwKA1v9NkCKasSi+UAguLsLDs/out7MoRzAlmz7VXvY6ph6w==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "open": { + "version": "7.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-7.4.2.tgz", + "integrity": "sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==", + "dev": true, + "requires": { + "is-docker": "^2.0.0", + "is-wsl": "^2.1.1" + } + }, + "optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + 
"integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, + "os-locale": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz", + "integrity": "sha1-IPnxeuKe00XoveWDsT0gCYA8FNk=", + "dev": true, + "requires": { + "lcid": "^1.0.0" + } + }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + }, + "dependencies": { + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + } + } + }, + "parse-headers": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/parse-headers/-/parse-headers-2.0.4.tgz", + "integrity": "sha512-psZ9iZoCNFLrgRjZ1d8mn0h9WRqJwFxM9q3x7iUjN/YT2OksthDJ5TiPCu2F38kS4zutqfW+YdVVkBZZx3/1aw==" + }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "requires": { + "error-ex": "^1.2.0" + } + }, + "patch-package": { + "version": "6.4.7", + "resolved": "https://registry.npmjs.org/patch-package/-/patch-package-6.4.7.tgz", + "integrity": "sha512-S0vh/ZEafZ17hbhgqdnpunKDfzHQibQizx9g8yEf5dcVk3KOflOfdufRXQX8CSEkyOQwuM/bNz1GwKvFj54kaQ==", + "dev": true, + "requires": { + "@yarnpkg/lockfile": "^1.1.0", + "chalk": "^2.4.2", + "cross-spawn": "^6.0.5", + "find-yarn-workspace-root": "^2.0.0", + "fs-extra": "^7.0.1", + "is-ci": "^2.0.0", + "klaw-sync": "^6.0.0", + "minimist": "^1.2.0", + "open": "^7.4.2", + "rimraf": "^2.6.3", + "semver": "^5.6.0", + "slash": "^2.0.0", + "tmp": "^0.0.33" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true + } + } + }, + 
"path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", + "dev": true + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-is-inside": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", + "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", + "dev": true + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "path-type": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", + "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } + }, + "pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true + }, + "pbkdf2": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", + "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "requires": { + "create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + }, + "pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", + "dev": true + }, + "pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": 
"sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "dev": true, + "requires": { + "pinkie": "^2.0.0" + } + }, + "postinstall-postinstall": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/postinstall-postinstall/-/postinstall-postinstall-2.1.0.tgz", + "integrity": "sha512-7hQX6ZlZXIoRiWNrbMQaLzUUfH+sSx39u8EJ9HYuDc1kLo9IXKWjM5RSquZN1ad5GnH8CGFM78fsAAQi3OKEEQ==", + "dev": true + }, + "precond": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/precond/-/precond-0.2.3.tgz", + "integrity": "sha1-qpWRvKokkj8eD0hJ0kD0fvwQdaw=" + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true + }, + "prettier": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.1.tgz", + "integrity": "sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==", + "dev": true + }, + "printj": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/printj/-/printj-1.1.2.tgz", + "integrity": "sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==", + "dev": true + }, + "process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true + }, + "promise-to-callback": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/promise-to-callback/-/promise-to-callback-1.0.0.tgz", + "integrity": "sha1-XSp0kBC/tn2WNZj805YHRqaP7vc=", + "requires": { + "is-fn": "^1.0.0", + "set-immediate-shim": "^1.0.1" + } + }, + "prr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", + "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=" + }, + "psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + }, + "punycode": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.0.tgz", + "integrity": "sha1-X4Y+3Im5bbCQdLrXlHvwkFbKTn0=" + }, + "qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "dev": true, + "requires": { + "side-channel": "^1.0.4" + } + }, + "querystring": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", + "integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==", + "dev": true + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "raw-body": { + 
"version": "2.4.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.2.tgz", + "integrity": "sha512-RPMAFUJP19WIet/99ngh6Iv8fzAbqum4Li7AD6DtGaW2RpMB/11xDoalPiJMTbu6I3hkbMVkATvZrqb9EEqeeQ==", + "dev": true, + "requires": { + "bytes": "3.1.1", + "http-errors": "1.8.1", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + } + }, + "read-pkg": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", + "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", + "dev": true, + "requires": { + "load-json-file": "^1.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^1.0.0" + } + }, + "read-pkg-up": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", + "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", + "dev": true, + "requires": { + "find-up": "^1.0.0", + "read-pkg": "^1.0.0" + }, + "dependencies": { + "find-up": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "dev": true, + "requires": { + "path-exists": "^2.0.0", + "pinkie-promise": "^2.0.0" + } + }, + "path-exists": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "dev": true, + "requires": { + "pinkie-promise": "^2.0.0" + } + } + } + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" + }, + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true + }, + "request": { + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "dependencies": { + "form-data": { + "version": "2.3.3", + "resolved": 
"https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "qs": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==" + }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "resolve": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.21.0.tgz", + "integrity": "sha512-3wCbTpk5WJlyE4mSOtDLhqQmGFi0/TD9VPwmiolnk8U0wRgMEktqCXd3vy5buTO3tljvalNvKrjHEfrd2WpEKA==", + "requires": { + "is-core-module": "^2.8.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", + "dev": true + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "ripemd160": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "rlp": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/rlp/-/rlp-2.2.7.tgz", + "integrity": "sha512-d5gdPmgQ0Z+AklL2NVXr/IoSjNZFfTVvQWzL/AM2AOcSzYP2xjlb0AC8YyCLc41MSNf6P6QVtjgPdmVtzb+4lQ==", + "requires": { + "bn.js": "^5.2.0" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==" + } + } + }, + "run-async": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": 
"sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true + }, + "rustbn.js": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/rustbn.js/-/rustbn.js-0.2.0.tgz", + "integrity": "sha512-4VlvkRUuCJvr2J6Y0ImW7NvTCriMi7ErOAqWk1y69vAdoNIzCF3yPmgeNzx+RQTLEDFq5sHfscn1MwHxP9hNfA==" + }, + "rxjs": { + "version": "6.6.7", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", + "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "safe-event-emitter": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/safe-event-emitter/-/safe-event-emitter-1.0.1.tgz", + "integrity": "sha512-e1wFe99A91XYYxoQbcq2ZJUWurxEyP8vfz7A7vuUe1s95q8r5ebraVaA1BukYJcpM6V16ugWoD9vngi8Ccu5fg==", + "requires": { + "events": "^3.0.0" + } + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "scrypt-js": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/scrypt-js/-/scrypt-js-3.0.1.tgz", + "integrity": "sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA==" + }, + "secp256k1": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/secp256k1/-/secp256k1-4.0.3.tgz", + "integrity": "sha512-NLZVf+ROMxwtEj3Xa562qgv2BK5e2WNmXPiOdVIPLgs6lyTzMvBq0aWTYMI5XCP9jZMVKOcqZLw/Wc4vDkuxhA==", + "requires": { + "elliptic": "^6.5.4", + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0" + } + }, + "semaphore": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/semaphore/-/semaphore-1.1.0.tgz", + "integrity": "sha512-O4OZEaNtkMd/K0i6js9SL+gqy0ZCBMgUvlSqHKi4IBdjhe7wB8pwztUk1BbZ1fmrvpwFrPbHzqd2w5pTcJH6LA==" + }, + "semaphore-async-await": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/semaphore-async-await/-/semaphore-async-await-1.5.1.tgz", + "integrity": "sha1-hXvvXjZEYBykuVcLh+nfXKEpdPo=", + "dev": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "set-immediate-shim": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz", + "integrity": "sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E=" + }, + "setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=" + }, + "setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "dev": true + }, + "sha.js": { + "version": "2.4.11", + "resolved": 
"https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "signal-exit": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz", + "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "slice-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + } + }, + "solc": { + "version": "0.6.12", + "resolved": "https://registry.npmjs.org/solc/-/solc-0.6.12.tgz", + "integrity": "sha512-Lm0Ql2G9Qc7yPP2Ba+WNmzw2jwsrd3u4PobHYlSOxaut3TtUbj9+5ZrT6f4DUpNPEoBaFUOEg9Op9C0mk7ge9g==", + "dev": true, + "requires": { + "command-exists": "^1.2.8", + "commander": "3.0.2", + "fs-extra": "^0.30.0", + "js-sha3": "0.8.0", + "memorystream": "^0.3.1", + "require-from-string": "^2.0.0", + "semver": "^5.5.0", + "tmp": "0.0.33" + }, + "dependencies": { + "fs-extra": { + "version": "0.30.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz", + "integrity": "sha1-8jP/zAjU2n1DLapEl3aYnbHfk/A=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^2.1.0", + "klaw": "^1.0.0", + "path-is-absolute": "^1.0.0", + "rimraf": "^2.2.8" + } + }, + "jsonfile": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "solhint": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/solhint/-/solhint-3.3.6.tgz", + "integrity": "sha512-HWUxTAv2h7hx3s3hAab3ifnlwb02ZWhwFU/wSudUHqteMS3ll9c+m1FlGn9V8ztE2rf3Z82fQZA005Wv7KpcFA==", + "dev": true, + "requires": { + "@solidity-parser/parser": "^0.13.2", + "ajv": "^6.6.1", + "antlr4": "4.7.1", + "ast-parents": 
"0.0.1", + "chalk": "^2.4.2", + "commander": "2.18.0", + "cosmiconfig": "^5.0.7", + "eslint": "^5.6.0", + "fast-diff": "^1.1.2", + "glob": "^7.1.3", + "ignore": "^4.0.6", + "js-yaml": "^3.12.0", + "lodash": "^4.17.11", + "prettier": "^1.14.3", + "semver": "^6.3.0" + }, + "dependencies": { + "@solidity-parser/parser": { + "version": "0.13.2", + "resolved": "https://registry.npmjs.org/@solidity-parser/parser/-/parser-0.13.2.tgz", + "integrity": "sha512-RwHnpRnfrnD2MSPveYoPh8nhofEvX7fgjHk1Oq+NNvCcLx4r1js91CO9o+F/F3fBzOCyvm8kKRTriFICX/odWw==", + "dev": true, + "requires": { + "antlr4ts": "^0.5.0-alpha.4" + } + }, + "commander": { + "version": "2.18.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.18.0.tgz", + "integrity": "sha512-6CYPa+JP2ftfRU2qkDK+UTVeQYosOg/2GbcjIcKPHfinyOLPVGXu/ovN86RP49Re5ndJK1N0kuiidFFuepc4ZQ==", + "dev": true + }, + "prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true, + "optional": true + } + } + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" + }, + "source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.11.tgz", + "integrity": "sha512-Ctl2BrFiM0X3MANYgj3CkygxhRmr9mi6xhejbdO960nF6EDJApTYpn0BQnDKlnNBULKiCN1n3w9EBkHK8ZWg+g==", + "dev": true + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "sshpk": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": 
"sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "dependencies": { + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + } + } + }, + "stacktrace-parser": { + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/stacktrace-parser/-/stacktrace-parser-0.1.10.tgz", + "integrity": "sha512-KJP1OCML99+8fhOHxwwzyWrlUuVX5GQ0ZpJTd1DFXhdkrvg1szxfHhawXUZ3g9TkXORQd4/WG68jMlQZ2p8wlg==", + "dev": true, + "requires": { + "type-fest": "^0.7.1" + }, + "dependencies": { + "type-fest": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.7.1.tgz", + "integrity": "sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg==", + "dev": true + } + } + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "string.prototype.trimend": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz", + "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + } + }, + "string.prototype.trimstart": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz", + "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-bom": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", + "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "dev": true, + "requires": { + "is-utf8": "^0.2.0" + } + }, + "strip-hex-prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-hex-prefix/-/strip-hex-prefix-1.0.0.tgz", + "integrity": "sha1-DF8VX+8RUTczd96du1iNoFUA428=", + "requires": { + "is-hex-prefixed": "1.0.0" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + 
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "requires": { + "has-flag": "^3.0.0" + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" + }, + "table": { + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "dev": true, + "requires": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "test-value": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/test-value/-/test-value-2.1.0.tgz", + "integrity": "sha1-Edpv9nDzRxpztiXKTz/c97t0gpE=", + "dev": true, + "requires": { + "array-back": "^1.0.3", + "typical": "^2.6.0" + }, + "dependencies": { + "array-back": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", + "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", + "dev": true, + "requires": { + "typical": "^2.6.0" + } + } + } + }, + "testrpc": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/testrpc/-/testrpc-0.0.1.tgz", + "integrity": "sha512-afH1hO+SQ/VPlmaLUFj2636QMeDvPCeQMc/9RBMW0IfjNe9gFD9Ra3ShqYkB7py0do1ZcCna/9acHyzTJ+GcNA==", + "dev": true + }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "dev": true + }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "requires": { + "os-tmpdir": "~1.0.2" + } + }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=" + }, 
+ "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "dev": true + }, + "tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "requires": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "dependencies": { + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + } + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true + }, + "true-case-path": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-2.2.1.tgz", + "integrity": "sha512-0z3j8R7MCjy10kc/g+qg7Ln3alJTodw9aDuVWZa3uiWqfuBMKeAeP2ocWcxoyM3D73yz3Jt/Pu4qPr4wHSdB/Q==", + "dev": true + }, + "truffle-plugin-verify": { + "version": "0.5.20", + "resolved": "https://registry.npmjs.org/truffle-plugin-verify/-/truffle-plugin-verify-0.5.20.tgz", + "integrity": "sha512-s6zG7QbVK5tWPAhRz1oKi/M8SXdRgcWR4PRuHM/BB0qZBcE/82WmnqyC2D/qfqEY+BCgUUWXfc/hyzsgH4dyNw==", + "dev": true, + "requires": { + "axios": "^0.21.1", + "cli-logger": "^0.5.40", + "delay": "^5.0.0", + "querystring": "^0.2.1" + } + }, + "ts-essentials": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-1.0.4.tgz", + "integrity": "sha512-q3N1xS4vZpRouhYHDPwO0bDW3EZ6SK9CrrDHxi/D6BPReSjpVgWIOpLS2o0gSBZm+7q/wyKp6RVM1AeeW7uyfQ==", + "dev": true + }, + "ts-generator": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/ts-generator/-/ts-generator-0.1.1.tgz", + "integrity": "sha512-N+ahhZxTLYu1HNTQetwWcx3so8hcYbkKBHTr4b4/YgObFTIKkOSSsaa+nal12w8mfrJAyzJfETXawbNjSfP2gQ==", + "dev": true, + "requires": { + "@types/mkdirp": "^0.5.2", + "@types/prettier": "^2.1.1", + "@types/resolve": "^0.0.8", + "chalk": "^2.4.1", + "glob": "^7.1.2", + "mkdirp": "^0.5.1", + "prettier": "^2.1.2", + "resolve": "^1.8.1", + "ts-essentials": "^1.0.0" + } + }, + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "tsort": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/tsort/-/tsort-0.0.1.tgz", + "integrity": "sha1-4igPXoF/i/QnVlf9D5rr1E9aJ4Y=", + "dev": true + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "tweetnacl": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz", + "integrity": 
"sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==", + "dev": true + }, + "tweetnacl-util": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/tweetnacl-util/-/tweetnacl-util-0.15.1.tgz", + "integrity": "sha512-RKJBIj8lySrShN4w6i/BonWp2Z/uxwC3h4y7xsRrpP59ZboCd0GpEVsOnMDYLMmKBpYhb5TgHzZXy7wTfYFBRw==", + "dev": true + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2" + } + }, + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true + }, + "typechain": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/typechain/-/typechain-3.0.0.tgz", + "integrity": "sha512-ft4KVmiN3zH4JUFu2WJBrwfHeDf772Tt2d8bssDTo/YcckKW2D+OwFrHXRC6hJvO3mHjFQTihoMV6fJOi0Hngg==", + "dev": true, + "requires": { + "command-line-args": "^4.0.7", + "debug": "^4.1.1", + "fs-extra": "^7.0.0", + "js-sha3": "^0.8.0", + "lodash": "^4.17.15", + "ts-essentials": "^6.0.3", + "ts-generator": "^0.1.1" + }, + "dependencies": { + "ts-essentials": { + "version": "6.0.7", + "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-6.0.7.tgz", + "integrity": "sha512-2E4HIIj4tQJlIHuATRHayv0EfMGK3ris/GRk1E3CFnsZzeNV+hUmelbaTZHLtXaZppM5oLhHRtO04gINC4Jusw==", + "dev": true, + "requires": {} + } + } + }, + "typescript": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.4.tgz", + "integrity": "sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==", + "dev": true, + "peer": true + }, + "typical": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/typical/-/typical-2.6.1.tgz", + "integrity": "sha1-XAgOXWYcu+OCWdLnCjxyU+hziB0=", + "dev": true + }, + "unbox-primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", + "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has-bigints": "^1.0.1", + "has-symbols": "^1.0.2", + "which-boxed-primitive": "^1.0.2" + } + }, + "universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true + }, + "unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=", + "dev": true + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "requires": { + "punycode": "^2.1.0" + } + }, + "url": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", + "integrity": 
"sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", + "dev": true, + "requires": { + "punycode": "1.3.2", + "querystring": "0.2.0" + }, + "dependencies": { + "punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", + "dev": true + }, + "querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "dev": true + } + } + }, + "utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/utf8/-/utf8-3.0.0.tgz", + "integrity": "sha512-E8VjFIQ/TyQgp+TZfS6l8yp/xWppSAHzidGiRrqe4bK4XP9pTRyKFgGJpO3SN7zdX4DeomTrwaseCHovfpFcqQ==" + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + }, + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + } + } + }, + "web3-utils": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/web3-utils/-/web3-utils-1.7.0.tgz", + "integrity": "sha512-O8Tl4Ky40Sp6pe89Olk2FsaUkgHyb5QAXuaKo38ms3CxZZ4d3rPGfjP9DNKGm5+IUgAZBNpF1VmlSmNCqfDI1w==", + "dev": true, + "requires": { + "bn.js": "^4.11.9", + "ethereum-bloom-filters": "^1.0.6", + "ethereumjs-util": "^7.1.0", + "ethjs-unit": "0.1.6", + "number-to-bn": "1.7.0", + "randombytes": "^2.1.0", + "utf8": "3.0.0" + }, + "dependencies": { + "ethereumjs-util": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.1.3.tgz", + "integrity": "sha512-y+82tEbyASO0K0X1/SRhbJJoAlfcvq8JbrG4a5cjrOks7HS/36efU/0j2flxCPOUM++HFahk33kr/ZxyC4vNuw==", + "dev": true, + "requires": { + "@types/bn.js": "^5.1.0", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "rlp": "^2.2.4" + }, + "dependencies": { + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true + } + } + } + } + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true + }, + "whatwg-fetch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-2.0.4.tgz", + "integrity": 
"sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng==" + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "requires": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dev": true, + "requires": { + "string-width": "^1.0.2 || 2" + } + }, + "window-size": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.2.0.tgz", + "integrity": "sha1-tDFbtCFKPXBY6+7okuE/ok2YsHU=", + "dev": true + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "write": { + "version": "1.0.3", + "resolved": 
"https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "dev": true, + "requires": { + "mkdirp": "^0.5.1" + } + }, + "ws": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-5.2.3.tgz", + "integrity": "sha512-jZArVERrMsKUatIdnLzqvcfydI85dvd/Fp1u/VOpfdDWQ4c9qWXe+VIeAbQ5FrDwciAkr+lzofXLz3Kuf26AOA==", + "requires": { + "async-limiter": "~1.0.0" + } + }, + "xhr": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/xhr/-/xhr-2.6.0.tgz", + "integrity": "sha512-/eCGLb5rxjx5e3mF1A7s+pLlR6CGyqWN91fv1JgER5mVWg1MZmlhBvy9kjcsOdRk8RrIujotWyJamfyrp+WIcA==", + "requires": { + "global": "~4.4.0", + "is-function": "^1.0.1", + "parse-headers": "^2.0.0", + "xtend": "^4.0.0" + } + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + }, + "y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true + }, + "yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "yargs": { + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + 
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, + "yargs-unparser": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.6.0.tgz", + "integrity": "sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==", + "dev": true, + "requires": { + "flat": "^4.1.0", + "lodash": "^4.17.15", + "yargs": "^13.3.0" + } + } + } +} diff --git a/contracts/package.json b/contracts/package.json new file mode 100644 index 0000000000..028f73d313 --- /dev/null +++ b/contracts/package.json @@ -0,0 +1,40 @@ +{ + "name": "bbgo-contracts", + "version": "1.0.0", + "description": "", + "directories": { + "test": "test" + }, + "dependencies": { + "@truffle/hdwallet-provider": "1.4" + }, + "devDependencies": { + "@nomiclabs/hardhat-ethers": "^2.0.2", + "@nomiclabs/hardhat-etherscan": "^2.1.1", + "@nomiclabs/hardhat-waffle": "^2.0.1", + "@openzeppelin/contracts": "^3.2.0", + "chai": "^4.3.4", + "ethereum-waffle": "^3.4.0", + "ethers": "^5.4.7", + "hardhat": "^2.6.5", + "prettier": "^2.5.1", + "solhint": "^3.3.6", + "truffle-plugin-verify": "^0.5.18" + }, + "scripts": { + "devserver": "KEY=development-secret.json truffle develop", + "test": "KEY=development-secret.json truffle test", + "migrate:dev": "KEY=development-secret.json truffle migrate --network development", + "migrate:polygon": "KEY=polygon-secret.json truffle migrate --network polygon", + "migrate:polygon-test": "KEY=polygon-secret.json truffle migrate --network mumbai", + "migrate:bsc": "KEY=bsc-secret.json truffle migrate --network bsc", + "migrate:bsc-test": "KEY=bsc-secret.json truffle migrate --network bsctestnet", + "lint": "npm run lint:sol && npm run lint:js", + "lint:js:fix": "prettier --write test/**/*.js", + "lint:js": "prettier test/**/*.js", + "lint:sol": "solhint contracts/**/*.sol", + "lint:sol:fix": "solhint -d contracts/**/*.sol --fix" + }, + "author": "starcrypto", + "license": "MIT" +} diff --git a/contracts/polygon-secret.json b/contracts/polygon-secret.json new file mode 100644 index 0000000000..df858f2130 --- /dev/null +++ b/contracts/polygon-secret.json @@ -0,0 +1,4 @@ +{ + "privateKey": "3899a918953e01bfe218116cdfeccbed579e26275c4a89abcbc70d2cb9e9bbb8", + "etherScanApiKey": "" +} diff --git a/contracts/test/erc20.js b/contracts/test/erc20.js new file mode 100644 index 0000000000..649a91ff31 --- /dev/null +++ 
b/contracts/test/erc20.js @@ -0,0 +1,16 @@ +const ChildMintableERC20 = artifacts.require("ChildMintableERC20"); +contract("ChildMintableERC20", (accounts) => { + it("should have BBG deployed", async () => { + const instance = await ChildMintableERC20.deployed(); + const name = await instance.name(); + const decimal = await instance.decimals(); + const symbol = await instance.symbol(); + const balance = await instance.balanceOf(accounts[0]); + const totalSupply = await instance.totalSupply(); + assert.equal(name.valueOf(), "BBGO"); + assert.equal(symbol.valueOf(), "BBG"); + assert.equal(decimal.toNumber(), 18); + assert.equal(balance.toNumber(), 0); + assert.equal(totalSupply.toNumber(), 0); + }); +}); diff --git a/contracts/truffle-config.js b/contracts/truffle-config.js new file mode 100644 index 0000000000..dbba2f0a8d --- /dev/null +++ b/contracts/truffle-config.js @@ -0,0 +1,133 @@ +/** + * Use this file to configure your truffle project. It's seeded with some + * common settings for different networks and features like migrations, + * compilation and testing. Uncomment the ones you need or modify + * them to suit your project as necessary. + * + * More information about configuration can be found at: + * + * trufflesuite.com/docs/advanced/configuration + * + * To deploy via Infura you'll need a wallet provider (like @truffle/hdwallet-provider) + * to sign your transactions before they're sent to a remote public node. Infura accounts + * are available for free at: infura.io/register. + * + * You'll also need a mnemonic - the twelve word phrase the wallet uses to generate + * public/private key pairs. If you're publishing your code to GitHub make sure you load this + * phrase from a file you've .gitignored so it doesn't accidentally become public. + * + */ +const fs = require('fs'); +const HDWalletProvider = require('@truffle/hdwallet-provider'); +const secret = JSON.parse(fs.readFileSync(process.env.KEY).toString().trim()); + + +module.exports = { + /** + * Networks define how you connect to your ethereum client and let you set the + * defaults web3 uses to send transactions. If you don't specify one truffle + * will spin up a development blockchain for you on port 9545 when you + * run `develop` or `test`. You can ask a truffle command to use a specific + * network from the command line, e.g + * + * $ truffle test --network + */ + + networks: { + // Useful for testing. The `development` name is special - truffle uses it by default + // if it's defined here and no other network is specified at the command line. + // You should run a client (like ganache-cli, geth or parity) in a separate terminal + // tab if you use this network and you must also set the `host`, `port` and `network_id` + // options below to some value. + // + // development: { + // host: "127.0.0.1", // Localhost (default: none) + // port: 8545, // Standard Ethereum port (default: none) + // network_id: "*", // Any network (default: none) + // }, + // Another network with more advanced options... + // advanced: { + // port: 8777, // Custom port + // network_id: 1342, // Custom network + // gas: 8500000, // Gas sent with each transaction (default: ~6700000) + // gasPrice: 20000000000, // 20 gwei (in wei) (default: 100 gwei) + // from:
, // Account to send txs from (default: accounts[0]) + // websocket: true // Enable EventEmitter interface for web3 (default: false) + // }, + // Useful for deploying to a public network. + // NB: It's important to wrap the provider as a function. + // ropsten: { + // provider: () => new HDWalletProvider(mnemonic, `https://ropsten.infura.io/v3/YOUR-PROJECT-ID`), + // network_id: 3, // Ropsten's id + // gas: 5500000, // Ropsten has a lower block limit than mainnet + // confirmations: 2, // # of confs to wait between deployments. (default: 0) + // timeoutBlocks: 200, // # of blocks before a deployment times out (minimum/default: 50) + // skipDryRun: true // Skip dry run before migrations? (default: false for public nets ) + // }, + // Useful for private networks + // private: { + // provider: () => new HDWalletProvider(mnemonic, `https://network.io`), + // network_id: 2111, // This network is yours, in the cloud. + // production: true // Treats this network as if it was a public net. (default: false) + // } + development: { + host: "127.0.0.1", + port: 9545, + network_id: "*", + }, + polygon: { + provider: () => new HDWalletProvider(secret.privateKey, "https://polygon-rpc.com"), + network_id: 137, + confirmations: 3, + timeoutBlocks: 200, + skipDryRun: true + }, + mumbai: { + provider: () => new HDWalletProvider(secret.privateKey, "https://matic-mumbai.chainstacklabs.com"), + network_id: 80001, + confirmations: 3, + timeoutBlocks: 200, + skipDryRun: true + }, + bsctestnet: { + // "https://speedy-nodes-nyc.moralis.io/91d001d6e2a55a9696521b4b/bsc/testnet" + provider: () => new HDWalletProvider(secret.privateKey, "https://data-seed-prebsc-1-s1.binance.org:8545"), + network_id: 97, + confirmations: 10, + timeoutBlocks: 200, + }, + bsc: { + provider: () => new HDWalletProvider(secret.privateKey, "https://bsc-dataseed1.binance.org"), + network_id: 56, + confirmations: 10, + timeoutBlocks: 200, + skipDryRun: true + }, + }, + + // Set default mocha options here, use special reporters etc. + mocha: { + // timeout: 100000 + }, + + // Configure your compilers + compilers: { + solc: { + version: "0.6.6", // Fetch exact version from solc-bin (default: truffle's version) + // docker: true, // Use "0.5.1" you've installed locally with docker (default: false) + // settings: { // See the solidity docs for advice about optimization and evmVersion + // optimizer: { + // enabled: false, + // runs: 200 + // }, + // evmVersion: "byzantium" + // } + } + }, + plugins: [ + 'truffle-plugin-verify' + ], + api_keys: { + etherscan: secret.etherScanApiKey + } +}; diff --git a/deploy.sh b/deploy.sh new file mode 100755 index 0000000000..e8c08b8b56 --- /dev/null +++ b/deploy.sh @@ -0,0 +1,136 @@ +#!/bin/bash +set -e + +target=$1 + +# bin_type is the binary type that you want to build bbgo +# use "bbgo" for full-features binary (including web application) +# use "bbgo-slim" for slim version binary (without web application) +bin_type=bbgo-slim + +# host_bin_dir is the directory that binary file will be uploaded to. +# default to $HOME/bin +host_bin_dir=bin + +host=bbgo +host_user=root +host_home=/root + +host_systemd_service_dir=/etc/systemd/system +host_os=linux +host_arch=amd64 + +# setup_host_systemd_service: should we create a new systemd service file if it does not exist? +# change this to "yes" to enable the automatic setup. 
+# if setup_host_systemd_service is enabled, the script will create a systemd service file from a template +# and then upload the systemd service file to $host_systemd_service_dir, +# root permission might be needed, you can change the host user to root temporarily while setting up the environment. +setup_host_systemd_service=no +if [[ -n $SETUP_SYSTEMD ]] ; then + setup_host_systemd_service=yes +fi + + +# use the git describe as the binary version, you may override this with something else. +tag=$(git describe --tags) + +RED='\033[1;31m' +GREEN='\033[1;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +function remote_test() { + ssh $host "(test $* && echo yes)" +} + +function remote_run() { + ssh $host "$*" +} + +function remote_eval() { + ssh $host "echo $*" +} + +function warn() { + echo "${YELLOW}$*${NC}" +} + +function error() { + echo "${RED}$*${NC}" +} + +function info() { + echo "${GREEN}$@${NC}" +} + +if [[ -n $BBGO_HOST ]]; then + host=$BBGO_HOST +else + warn "env var BBGO_HOST is not set, using \"bbgo\" host alias as the default host, you can add \"bbgo\" to your ~/.ssh/config file" + host=bbgo +fi + +if [[ -z $target ]]; then + echo "Usage: $0 [target]" + echo "target name is required" + exit 1 +fi + +# initialize the remote environment +# create the directory for placing binaries +ssh $host "mkdir -p \$HOME/$host_bin_dir && mkdir -p \$HOME/$target" + +if [[ $(remote_test "-e $host_systemd_service_dir/$target.service") != "yes" ]]; then + if [[ "$setup_host_systemd_service" == "no" ]]; then + error "The systemd $target.service on host $host is not configured, cannot deploy" + exit 1 + fi + + warn "$host_systemd_service_dir/$target.service does not exist, setting up..." + + if [[ -z $host_home ]]; then + host_home=$(remote_eval "\$HOME") + fi + + cat <<END >".systemd.$target.service" +[Unit] +After=network-online.target +Wants=network-online.target + +[Install] +WantedBy=multi-user.target + +[Service] +WorkingDirectory=$host_home/$target +KillMode=process +ExecStart=$host_home/$target/bbgo run +User=$host_user +Restart=always +RestartSec=30 +END + + info "uploading systemd service file..." + scp ".systemd.$target.service" "$host:$host_systemd_service_dir/$target.service" + + info "reloading systemd daemon..." + remote_run "sudo systemctl daemon-reload && systemctl enable $target" +fi + +info "building binary: $bin_type-$host_os-$host_arch..." +make $bin_type-$host_os-$host_arch + +# copy the binary to the server +info "deploying..." +info "copying binary to host $host..." + +if [[ $(remote_test "-e $host_bin_dir/bbgo-$tag") != "yes" ]] ; then + scp build/bbgo/$bin_type-$host_os-$host_arch $host:$host_bin_dir/bbgo-$tag +else + info "binary $host_bin_dir/bbgo-$tag already exists, we will use the existing one" +fi + +# link binary and restart the systemd service +info "linking binary and restarting..." +ssh $host "(cd $target && ln -sf \$HOME/$host_bin_dir/bbgo-$tag bbgo && sudo systemctl restart $target.service)" + +info "deployed successfully!"
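Note on the deploy.sh added above: it drives the whole release flow over SSH. It builds the `bbgo-slim-linux-amd64` make target, uploads the binary to `$HOME/bin/bbgo-<git tag>` on the host, optionally generates the systemd unit from the inline template, then symlinks the binary into the target directory and restarts `<target>.service`. A minimal invocation sketch follows; the `bbgo` host alias in `~/.ssh/config` and the target name `mybot` are illustrative assumptions, not values from this patch.

```shell
# First deployment: SETUP_SYSTEMD=yes lets the script generate and upload
# /etc/systemd/system/mybot.service from its built-in template (root access
# on the host may be needed for this step).
# "mybot" is a hypothetical target name; it becomes both the remote working
# directory ($HOME/mybot) and the systemd unit name (mybot.service).
BBGO_HOST=bbgo SETUP_SYSTEMD=yes ./deploy.sh mybot

# Later releases reuse the existing unit, so the toggle can be dropped:
./deploy.sh mybot
```

The script expects `bbgo.yaml` (and any dotenv file) to already exist in `$HOME/mybot` on the host, since the generated unit simply runs `bbgo run` from that working directory.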
diff --git a/desktop/build-darwin.sh b/desktop/build-darwin.sh new file mode 100644 index 0000000000..765f28e9a2 --- /dev/null +++ b/desktop/build-darwin.sh @@ -0,0 +1,23 @@ +#!/bin/sh +APP="BBGO.app" +APP_DIR=build/$APP + +go build -o $APP_DIR/Contents/MacOS/bbgo-desktop ./cmd/bbgo-desktop + +cat > $APP_DIR/Contents/Info.plist << EOF + + + + + CFBundleExecutable + bbgo-desktop + CFBundleIconFile + icon.icns + CFBundleIdentifier + com.bbgo.lorca + + +EOF + +cp -v desktop/icons/icon.icns $APP_DIR/Contents/Resources/icon.icns +find $APP_DIR diff --git a/desktop/build-osx-info-plist.sh b/desktop/build-osx-info-plist.sh new file mode 100755 index 0000000000..3197fb9690 --- /dev/null +++ b/desktop/build-osx-info-plist.sh @@ -0,0 +1,21 @@ +#!/bin/bash +cat << EOF + + + + + CFBundleExecutable + bbgo-desktop + CFBundleIconFile + icon.icns + CFBundleIdentifier + com.bbgo.lorca + LSFileQuarantineEnabled + + NSUserNotificationAlertStyle + banner + NSHighResolutionCapable + + + +EOF diff --git a/desktop/icons/icon-256.png b/desktop/icons/icon-256.png new file mode 100644 index 0000000000..a1dfbef63e Binary files /dev/null and b/desktop/icons/icon-256.png differ diff --git a/desktop/icons/icon.icns b/desktop/icons/icon.icns new file mode 100644 index 0000000000..d009f9404e Binary files /dev/null and b/desktop/icons/icon.icns differ diff --git a/desktop/icons/icon.png b/desktop/icons/icon.png new file mode 100644 index 0000000000..09383e27e0 Binary files /dev/null and b/desktop/icons/icon.png differ diff --git a/doc/README.md b/doc/README.md new file mode 100644 index 0000000000..ade7d4089a --- /dev/null +++ b/doc/README.md @@ -0,0 +1,32 @@ +# BBGO Documentation Index +-------------------------- + +### General Topics +* [Commands](commands/bbgo.md) - BBGO command line usage +* [Build From Source](build-from-source.md) - How to build bbgo +* [Back-testing](topics/back-testing.md) - How to back-test strategies +* [TWAP](topics/twap.md) - TWAP order execution to buy/sell large quantity of order +* [Dnum Installation](topics/dnum-binary.md) - installation of high-precision version of bbgo + +### Configuration +* [Setting up Slack Notification](configuration/slack.md) +* [Setting up Telegram Notification](configuration/telegram.md) - Setting up Telegram Bot Notification +* [Environment Variables](configuration/envvars.md) +* [Syncing Trading Data](configuration/sync.md) - Synchronize private trading data + +### Deployment +* [Helm Chart Deployment](deployment/helm-chart.md) +* [Setting up Systemd](deployment/systemd.md) + +### Strategies +* [Grid](strategy/grid.md) - Grid Strategy Explanation +* [Interaction](strategy/interaction.md) - Interaction registration for strategies +* [Price Alert](strategy/pricealert.md) - Send price alert notification on price changes +* [Supertrend](strategy/supertrend.md) - Supertrend strategy uses Supertrend indicator as trend, and DEMA indicator as noise filter +* [Support](strategy/support.md) - Support strategy that buys on high volume support + +### Development +* [Adding New Exchange](development/adding-new-exchange.md) - Check lists for adding new exchanges +* [KuCoin Command-line Test Tool](development/kucoin-cli.md) - Kucoin command-line tools +* [SQL Migration](development/migration.md) - Adding new SQL migration scripts +* [Release Process](development/release-process.md) - How to make a new release diff --git a/doc/build-from-source.md b/doc/build-from-source.md new file mode 100644 index 0000000000..d8057ee48a --- /dev/null +++ b/doc/build-from-source.md @@ -0,0 
+1,133 @@ +# Build From Source + +## Install Go SDK + +Go to the Go official website to download the Go SDK. + +An example installation looks like this: + +```shell +wget https://go.dev/dl/go1.17.4.linux-amd64.tar.gz +sudo rm -rf /usr/local/go +sudo tar -C /usr/local -xzf go1.17.4.linux-amd64.tar.gz +``` + +Then edit your ~/.profile or ~/.bashrc to have this line at the end: + +```shell +export PATH=$PATH:/usr/local/go/bin +``` + +For the changes to take effect, you need to log in again, or run: + +```shell +source $HOME/.profile +``` + +Make sure your `go` is successfully installed: + +```shell +go version +``` + +## Install go-sqlite + +If you need to use go-sqlite, you will need to enable CGO first: + +``` +CGO_ENABLED=1 go get github.com/mattn/go-sqlite3 +``` + +## Install + +### Install bbgo via go install + +Install bbgo: + +```sh +go install -x github.com/c9s/bbgo/cmd/bbgo@main +``` + +Your binary will be installed into the default GOPATH `~/go/bin`. +You can add the bin path to your PATH env var by adding the following code to your `~/.zshrc` or `~/.bashrc`: + +```shell +export PATH=~/go/bin:$PATH +``` + +And then check the version; it should be `1.x-dev`: + +```shell +bbgo version +``` + +If not, try running `ls -lh ~/go/bin/bbgo` to see if the binary is installed. +If it's already there, it means your PATH is misconfigured. + +If you prefer another place for installing the Go-related binaries, you can set GOPATH to somewhere else, e.g. + +```shell +export GOPATH=~/mygo +``` + +Then your bbgo will be installed at `~/mygo/bin/bbgo`. + +### Install via git clone + +Since the default GOPATH is located at `~/go`, you can clone the bbgo repo into the folder `~/go/src/github.com/c9s/bbgo`: + +```shell +mkdir -p ~/go/src/github.com/c9s +git clone git@github.com:c9s/bbgo.git ~/go/src/github.com/c9s/bbgo +cd ~/go/src/github.com/c9s/bbgo +``` + +Download the go modules: + +```shell +go mod download +``` + +And then you should be able to run bbgo with `go run`: + +```shell +go run ./cmd/bbgo run +``` + +You can also use the makefile to build bbgo: + +```shell +cd frontend && yarn install +make bbgo +``` + +If you don't need the web interface, you can build the slim version of bbgo: + +```shell +make bbgo-slim +``` + +## Build inside an Alpine container + +Start a docker container with the alpine image: + +```shell +docker run -it --rm alpine +``` + +Run the following command to install the dependencies: + +```shell +apk add git go gcc libc-dev sqlite +export CGO_ENABLED=1 +go get github.com/mattn/go-sqlite3 +go install github.com/c9s/bbgo/cmd/bbgo@latest +``` + +Your installed bbgo binary will be located in: + +``` +/root/go/bin/bbgo +``` + +You can use the above instructions to write your own Dockerfile. diff --git a/doc/commands/bbgo.md b/doc/commands/bbgo.md new file mode 100644 index 0000000000..1f3e6f89a1 --- /dev/null +++ b/doc/commands/bbgo.md @@ -0,0 +1,61 @@ +## bbgo + +bbgo is a crypto trading bot + +``` +bbgo [flags] +``` + +### Options + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount.
+ -h, --help help for bbgo + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo account](bbgo_account.md) - show user account details (ex: balance) +* [bbgo backtest](bbgo_backtest.md) - run backtest with strategies +* [bbgo balances](bbgo_balances.md) - Show user account balances +* [bbgo build](bbgo_build.md) - build cross-platform binary +* [bbgo cancel-order](bbgo_cancel-order.md) - cancel orders +* [bbgo deposits](bbgo_deposits.md) - A testing utility that will query deposition history in last 7 days +* [bbgo execute-order](bbgo_execute-order.md) - execute buy/sell on the balance/position you have on specific symbol +* [bbgo get-order](bbgo_get-order.md) - Get order status +* [bbgo kline](bbgo_kline.md) - connect to the kline market data streaming service of an exchange +* [bbgo list-orders](bbgo_list-orders.md) - list user's open orders in exchange of a specific trading pair +* [bbgo margin](bbgo_margin.md) - margin related history +* [bbgo market](bbgo_market.md) - List the symbols that the are available to be traded in the exchange +* [bbgo optimize](bbgo_optimize.md) - run optimizer +* [bbgo orderbook](bbgo_orderbook.md) - connect to the order book market data streaming service of an exchange +* [bbgo orderupdate](bbgo_orderupdate.md) - Listen to order update events +* [bbgo pnl](bbgo_pnl.md) - Average Cost Based PnL Calculator +* [bbgo run](bbgo_run.md) - run strategies from config file +* [bbgo submit-order](bbgo_submit-order.md) - place order to the exchange +* [bbgo sync](bbgo_sync.md) - sync trades and orders history +* [bbgo trades](bbgo_trades.md) - Query trading history +* [bbgo tradeupdate](bbgo_tradeupdate.md) - Listen to trade update events +* [bbgo transfer-history](bbgo_transfer-history.md) - show transfer history +* [bbgo userdatastream](bbgo_userdatastream.md) - Listen to session events (orderUpdate, tradeUpdate, balanceUpdate, balanceSnapshot) +* [bbgo version](bbgo_version.md) - show version name + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_account.md b/doc/commands/bbgo_account.md new file mode 100644 index 0000000000..96c7d8e2a3 --- /dev/null +++ b/doc/commands/bbgo_account.md @@ -0,0 +1,45 @@ +## bbgo account + +show user account details (ex: balance) + +``` +bbgo account [--session SESSION] [flags] +``` + +### Options + +``` + -h, --help help for account + --session string the exchange session name for querying information + --total report total asset +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_backtest.md b/doc/commands/bbgo_backtest.md new file mode 100644 index 0000000000..045d5c4c31 --- /dev/null +++ b/doc/commands/bbgo_backtest.md @@ -0,0 +1,54 @@ +## bbgo backtest + +run backtest with strategies + +``` +bbgo backtest [flags] +``` + +### Options + +``` + --base-asset-baseline use base asset performance as the competitive baseline performance + --force force execution without confirm + -h, --help help for backtest + --output string the report output directory + --session string specify only one exchange session to run backtest + --subdir generate report in the sub-directory of the output directory + --sync sync backtest data + --sync-exchange string specify only one exchange to sync backtest data + --sync-from string sync backtest data from the given time, which will override the time range in the backtest config + --sync-only sync backtest data only, do not run backtest + -v, --verbose count verbose level + --verify verify the kline back-test data +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_balances.md b/doc/commands/bbgo_balances.md new file mode 100644 index 0000000000..727f2939c3 --- /dev/null +++ b/doc/commands/bbgo_balances.md @@ -0,0 +1,44 @@ +## bbgo balances + +Show user account balances + +``` +bbgo balances [--session SESSION] [flags] +``` + +### Options + +``` + -h, --help help for balances + --session string the exchange session name for querying balances +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_build.md b/doc/commands/bbgo_build.md new file mode 100644 index 0000000000..5ed10a46b6 --- /dev/null +++ b/doc/commands/bbgo_build.md @@ -0,0 +1,43 @@ +## bbgo build + +build cross-platform binary + +``` +bbgo build [flags] +``` + +### Options + +``` + -h, --help help for build +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_cancel-order.md b/doc/commands/bbgo_cancel-order.md new file mode 100644 index 0000000000..efd22f1141 --- /dev/null +++ b/doc/commands/bbgo_cancel-order.md @@ -0,0 +1,53 @@ +## bbgo cancel-order + +cancel orders + +### Synopsis + +this command can cancel orders from exchange + +``` +bbgo cancel-order [flags] +``` + +### Options + +``` + --all cancel all orders + --group-id int group ID to cancel orders + -h, --help help for cancel-order + --order-id uint order ID to cancel orders + --order-uuid string order UUID to cancel orders + --session string session to execute cancel orders + --symbol string symbol to cancel orders +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_deposits.md b/doc/commands/bbgo_deposits.md new file mode 100644 index 0000000000..9e0fd6dd5e --- /dev/null +++ b/doc/commands/bbgo_deposits.md @@ -0,0 +1,45 @@ +## bbgo deposits + +A testing utility that will query deposition history in last 7 days + +``` +bbgo deposits [flags] +``` + +### Options + +``` + --asset string the trading pair, like btcusdt + -h, --help help for deposits + --session string the exchange session name for querying balances +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_execute-order.md b/doc/commands/bbgo_execute-order.md new file mode 100644 index 0000000000..e276d34a92 --- /dev/null +++ b/doc/commands/bbgo_execute-order.md @@ -0,0 +1,52 @@ +## bbgo execute-order + +execute buy/sell on the balance/position you have on specific symbol + +``` +bbgo execute-order --session SESSION --symbol SYMBOL --side SIDE --target-quantity TOTAL_QUANTITY --slice-quantity SLICE_QUANTITY [flags] +``` + +### Options + +``` + --deadline duration deadline of the order execution + -h, --help help for execute-order + --price-ticks int the number of price tick for the jump spread, default to 0 + --session string the exchange session name for sync + --side string the trading side: buy or sell + --slice-quantity string slice quantity + --stop-price string stop price (default "0") + --symbol string the trading pair, like btcusdt + --target-quantity string target quantity + --update-interval duration order update time (default 10s) +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_get-order.md b/doc/commands/bbgo_get-order.md new file mode 100644 index 0000000000..ca8c302100 --- /dev/null +++ b/doc/commands/bbgo_get-order.md @@ -0,0 +1,46 @@ +## bbgo get-order + +Get order status + +``` +bbgo get-order --session SESSION --order-id ORDER_ID [flags] +``` + +### Options + +``` + -h, --help help for get-order + --order-id string order id + --session string the exchange session name for sync + --symbol string the trading pair, like btcusdt +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_kline.md b/doc/commands/bbgo_kline.md new file mode 100644 index 0000000000..9d20d5753c --- /dev/null +++ b/doc/commands/bbgo_kline.md @@ -0,0 +1,46 @@ +## bbgo kline + +connect to the kline market data streaming service of an exchange + +``` +bbgo kline [flags] +``` + +### Options + +``` + -h, --help help for kline + --interval string interval of the kline (candle), .e.g, 1m, 3m, 15m (default "1m") + --session string session name + --symbol string the trading pair. e.g, BTCUSDT, LTCUSDT... +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_list-orders.md b/doc/commands/bbgo_list-orders.md new file mode 100644 index 0000000000..fb9e8a237b --- /dev/null +++ b/doc/commands/bbgo_list-orders.md @@ -0,0 +1,45 @@ +## bbgo list-orders + +list user's open orders in exchange of a specific trading pair + +``` +bbgo list-orders open|closed --session SESSION --symbol SYMBOL [flags] +``` + +### Options + +``` + -h, --help help for list-orders + --session string the exchange session name for sync + --symbol string the trading pair, like btcusdt +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_margin.md b/doc/commands/bbgo_margin.md new file mode 100644 index 0000000000..e7d422a948 --- /dev/null +++ b/doc/commands/bbgo_margin.md @@ -0,0 +1,42 @@ +## bbgo margin + +margin related history + +### Options + +``` + -h, --help help for margin +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot +* [bbgo margin interests](bbgo_margin_interests.md) - query interests history +* [bbgo margin loans](bbgo_margin_loans.md) - query loans history +* [bbgo margin repays](bbgo_margin_repays.md) - query repay history + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_margin_interests.md b/doc/commands/bbgo_margin_interests.md new file mode 100644 index 0000000000..81049e8391 --- /dev/null +++ b/doc/commands/bbgo_margin_interests.md @@ -0,0 +1,45 @@ +## bbgo margin interests + +query interests history + +``` +bbgo margin interests --session=SESSION_NAME --asset=ASSET [flags] +``` + +### Options + +``` + --asset string asset + -h, --help help for interests + --session string exchange session name +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo margin](bbgo_margin.md) - margin related history + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_margin_loans.md b/doc/commands/bbgo_margin_loans.md new file mode 100644 index 0000000000..0ccf268e3e --- /dev/null +++ b/doc/commands/bbgo_margin_loans.md @@ -0,0 +1,45 @@ +## bbgo margin loans + +query loans history + +``` +bbgo margin loans --session=SESSION_NAME --asset=ASSET [flags] +``` + +### Options + +``` + --asset string asset + -h, --help help for loans + --session string exchange session name +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo margin](bbgo_margin.md) - margin related history + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_margin_repays.md b/doc/commands/bbgo_margin_repays.md new file mode 100644 index 0000000000..5a1d8f4a49 --- /dev/null +++ b/doc/commands/bbgo_margin_repays.md @@ -0,0 +1,45 @@ +## bbgo margin repays + +query repay history + +``` +bbgo margin repays --session=SESSION_NAME --asset=ASSET [flags] +``` + +### Options + +``` + --asset string asset + -h, --help help for repays + --session string exchange session name +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo margin](bbgo_margin.md) - margin related history + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_market.md b/doc/commands/bbgo_market.md new file mode 100644 index 0000000000..5bd9907b66 --- /dev/null +++ b/doc/commands/bbgo_market.md @@ -0,0 +1,44 @@ +## bbgo market + +List the symbols that the are available to be traded in the exchange + +``` +bbgo market [flags] +``` + +### Options + +``` + -h, --help help for market + --session string the exchange session name for querying information +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_optimize.md b/doc/commands/bbgo_optimize.md new file mode 100644 index 0000000000..f29fcb3ed6 --- /dev/null +++ b/doc/commands/bbgo_optimize.md @@ -0,0 +1,46 @@ +## bbgo optimize + +run optimizer + +``` +bbgo optimize [flags] +``` + +### Options + +``` + -h, --help help for optimize + --json print optimizer metrics in json format + --optimizer-config string config file (default "optimizer.yaml") + --output string backtest report output directory (default "output") +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_orderbook.md b/doc/commands/bbgo_orderbook.md new file mode 100644 index 0000000000..b81ccbaece --- /dev/null +++ b/doc/commands/bbgo_orderbook.md @@ -0,0 +1,46 @@ +## bbgo orderbook + +connect to the order book market data streaming service of an exchange + +``` +bbgo orderbook --session=[exchange_name] --symbol=[pair_name] [flags] +``` + +### Options + +``` + --dump-update dump the depth update + -h, --help help for orderbook + --session string session name + --symbol string the trading pair. e.g, BTCUSDT, LTCUSDT... +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_orderupdate.md b/doc/commands/bbgo_orderupdate.md new file mode 100644 index 0000000000..6f1a9397b6 --- /dev/null +++ b/doc/commands/bbgo_orderupdate.md @@ -0,0 +1,44 @@ +## bbgo orderupdate + +Listen to order update events + +``` +bbgo orderupdate [flags] +``` + +### Options + +``` + -h, --help help for orderupdate + --session string session name +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_pnl.md b/doc/commands/bbgo_pnl.md new file mode 100644 index 0000000000..ce0cf85370 --- /dev/null +++ b/doc/commands/bbgo_pnl.md @@ -0,0 +1,51 @@ +## bbgo pnl + +Average Cost Based PnL Calculator + +### Synopsis + +This command calculates the average cost-based profit from your total trades + +``` +bbgo pnl [flags] +``` + +### Options + +``` + -h, --help help for pnl + --include-transfer convert transfer records into trades + --limit int number of trades + --session string target exchange + --symbol string trading symbol +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_run.md b/doc/commands/bbgo_run.md new file mode 100644 index 0000000000..73f52f3b08 --- /dev/null +++ b/doc/commands/bbgo_run.md @@ -0,0 +1,54 @@ +## bbgo run + +run strategies from config file + +``` +bbgo run [flags] +``` + +### Options + +``` + --enable-grpc enable grpc server + --enable-web-server legacy option, this is renamed to --enable-webserver + --enable-webserver enable webserver + --grpc-bind string grpc server binding (default ":50051") + -h, --help help for run + --no-compile do not compile wrapper binary + --no-sync do not sync on startup + --setup use setup mode + --totp-account-name string + --totp-issuer string + --totp-key-url string time-based one-time password key URL, if defined, it will be used for restoring the otp key + --webserver-bind string webserver binding (default ":8080") +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_submit-order.md b/doc/commands/bbgo_submit-order.md new file mode 100644 index 0000000000..eebe36247c --- /dev/null +++ b/doc/commands/bbgo_submit-order.md @@ -0,0 +1,48 @@ +## bbgo submit-order + +place order to the exchange + +``` +bbgo submit-order --session SESSION --symbol SYMBOL --side SIDE --quantity QUANTITY [--price PRICE] [flags] +``` + +### Options + +``` + -h, --help help for submit-order + --price string the trading price + --quantity string the trading quantity + --session string the exchange session name for sync + --side string the trading side: buy or sell + --symbol string the trading pair, like btcusdt +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_sync.md b/doc/commands/bbgo_sync.md new file mode 100644 index 0000000000..70d85fb01b --- /dev/null +++ b/doc/commands/bbgo_sync.md @@ -0,0 +1,46 @@ +## bbgo sync + +sync trades and orders history + +``` +bbgo sync [--session=[exchange_name]] [--symbol=[pair_name]] [[--since=yyyy/mm/dd]] [flags] +``` + +### Options + +``` + -h, --help help for sync + --session stringArray the exchange session name for sync + --since string sync from time + --symbol string symbol of market for syncing +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_trades.md b/doc/commands/bbgo_trades.md new file mode 100644 index 0000000000..14c7fcd9d0 --- /dev/null +++ b/doc/commands/bbgo_trades.md @@ -0,0 +1,46 @@ +## bbgo trades + +Query trading history + +``` +bbgo trades --session=[exchange_name] --symbol=[pair_name] [flags] +``` + +### Options + +``` + -h, --help help for trades + --limit int limit (default 100) + --session string the exchange session name for querying balances + --symbol string the trading pair, like btcusdt +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_tradeupdate.md b/doc/commands/bbgo_tradeupdate.md new file mode 100644 index 0000000000..20da715ca8 --- /dev/null +++ b/doc/commands/bbgo_tradeupdate.md @@ -0,0 +1,44 @@ +## bbgo tradeupdate + +Listen to trade update events + +``` +bbgo tradeupdate --session=[exchange_name] [flags] +``` + +### Options + +``` + -h, --help help for tradeupdate + --session string the exchange session name for querying balances +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_transfer-history.md b/doc/commands/bbgo_transfer-history.md new file mode 100644 index 0000000000..dfdc679317 --- /dev/null +++ b/doc/commands/bbgo_transfer-history.md @@ -0,0 +1,46 @@ +## bbgo transfer-history + +show transfer history + +``` +bbgo transfer-history [flags] +``` + +### Options + +``` + --asset string trading symbol + -h, --help help for transfer-history + --session string target exchange session + --since string since time +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_userdatastream.md b/doc/commands/bbgo_userdatastream.md new file mode 100644 index 0000000000..a8c102fd2a --- /dev/null +++ b/doc/commands/bbgo_userdatastream.md @@ -0,0 +1,44 @@ +## bbgo userdatastream + +Listen to session events (orderUpdate, tradeUpdate, balanceUpdate, balanceSnapshot) + +``` +bbgo userdatastream [flags] +``` + +### Options + +``` + -h, --help help for userdatastream + --session string session name +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. 
+ --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/commands/bbgo_version.md b/doc/commands/bbgo_version.md new file mode 100644 index 0000000000..23266c3c31 --- /dev/null +++ b/doc/commands/bbgo_version.md @@ -0,0 +1,43 @@ +## bbgo version + +show version name + +``` +bbgo version [flags] +``` + +### Options + +``` + -h, --help help for version +``` + +### Options inherited from parent commands + +``` + --binance-api-key string binance api key + --binance-api-secret string binance api secret + --config string config file (default "bbgo.yaml") + --cpu-profile string cpu profile + --debug debug mode + --dotenv string the dotenv file you want to load (default ".env.local") + --ftx-api-key string ftx api key + --ftx-api-secret string ftx api secret + --ftx-subaccount string subaccount name. Specify it if the credential is for subaccount. + --max-api-key string max api key + --max-api-secret string max api secret + --metrics enable prometheus metrics + --metrics-port string prometheus http server port (default "9090") + --no-dotenv disable built-in dotenv + --slack-channel string slack trading channel (default "dev-bbgo") + --slack-error-channel string slack error channel (default "bbgo-error") + --slack-token string slack token + --telegram-bot-auth-token string telegram auth token + --telegram-bot-token string telegram bot token from bot father +``` + +### SEE ALSO + +* [bbgo](bbgo.md) - bbgo is a crypto trading bot + +###### Auto generated by spf13/cobra on 17-Jun-2022 diff --git a/doc/configuration/envvars.md b/doc/configuration/envvars.md new file mode 100644 index 0000000000..53d3bbc090 --- /dev/null +++ b/doc/configuration/envvars.md @@ -0,0 +1,24 @@ +# Environment Variables + +## MAX Exchange + +```shell +# MAX_QUERY_CLOSED_ORDERS_NUM_OF_PAGES=[number of pages] +# The MAX Exchange API does not support time-range based query for the closed orders +# We can only sync the closed orders by page number, here is the maximum pages you want to sync +MAX_QUERY_CLOSED_ORDERS_NUM_OF_PAGES=10 + + +# MAX_QUERY_CLOSED_ORDERS_LIMIT=[limit per query] +# using defualt limit 1000 might cause the server response timeout, you can decrease this limit to prevent this kind of error. +# default = 1000 +MAX_QUERY_CLOSED_ORDERS_LIMIT=500 + + +# MAX_QUERY_CLOSED_ORDERS_ALL=[1 or 0] +# The MAX Exchange API does not support time-range based query for the closed orders +# If you want to sync all the orders, you must start from the first page +# To enable this mode, set this variable to 1 +MAX_QUERY_CLOSED_ORDERS_ALL=1 +``` + diff --git a/doc/configuration/slack.md b/doc/configuration/slack.md new file mode 100644 index 0000000000..881114b292 --- /dev/null +++ b/doc/configuration/slack.md @@ -0,0 +1,37 @@ +### Setting up Slack Notification + +Go to the Slack apps page to create your own slack app: + + + +Click "Install your app" -> "Install to Workspace" + +Copy the *Bot User OAuth Token*. 
+
+Put your slack bot token in the `.env.local` file:
+
+```sh
+SLACK_TOKEN=xxoox
+```
+
+And add the following notification config to your `bbgo.yml`:
+
+```yaml
+---
+notifications:
+  slack:
+    defaultChannel: "bbgo-xarb"
+    errorChannel: "bbgo-error"
+
+    # routing rules
+    routing:
+      trade: "$silent"
+      order: "$silent"
+      submitOrder: "$silent"
+```
+
+Be sure to add your bot to the public channel by clicking "Add slack app to channel".
+
+## See Also
+
+-
diff --git a/doc/configuration/sync.md b/doc/configuration/sync.md
new file mode 100644
index 0000000000..2322642903
--- /dev/null
+++ b/doc/configuration/sync.md
@@ -0,0 +1,42 @@
+# Sync Private Trading Data
+
+You can use the following configuration (add this to your bbgo.yaml) to sync your private trading data, like closed
+orders and trades:
+
+```yaml
+sync:
+  # since is the date you want to start syncing from
+  since: 2019-11-01
+
+  # if you have multiple sessions defined but don't want to sync all of them, you can define a list here
+  sessions:
+    - binance
+    - max
+
+  # optional, if you want to insert the trades and orders from the websocket stream
+  # if you're running multiple bbgo instances, you should avoid turning this on
+  userDataStream:
+    # if you set this, all received trades will be written into the database
+    trades: true
+    # if you set this, all received filled orders will be written into the database
+    filledOrders: true
+
+  # symbols are the symbols you want to sync
+  # if not defined, BBGO will try to guess your symbols from your existing account balances
+  symbols:
+    - BTCUSDT
+    - ETHUSDT
+    - LINKUSDT
+```
+
+Then you can start syncing by running the following command:
+
+```shell
+bbgo sync --config config/bbgo.yaml
+```
+
+Or simply (if bbgo.yaml is in the current working directory):
+
+```shell
+bbgo sync
+```
diff --git a/doc/configuration/telegram.md b/doc/configuration/telegram.md
new file mode 100644
index 0000000000..40da638840
--- /dev/null
+++ b/doc/configuration/telegram.md
@@ -0,0 +1,55 @@
+### Setting up Telegram Bot Notification
+
+Open your Telegram app, and chat with @BotFather
+
+Enter `/newbot` to create a new bot
+
+Enter the bot display name, e.g., `your_bbgo_bot`
+
+Enter the bot username. This must be globally unique, e.g., `bbgo_bot_711222333`
+
+BotFather will reply with a bot token. *Keep the bot token safe*
+
+Add `TELEGRAM_BOT_TOKEN` to your `.env.local` file, e.g.,
+
+```shell
+TELEGRAM_BOT_TOKEN=347374838:ABFTjfiweajfiawoejfiaojfeijoaef
+```
+
+For the telegram chat authentication (your bot needs to verify it's you), if you only need a fixed authentication token,
+you can set `TELEGRAM_BOT_AUTH_TOKEN` in the `.env.local` file, e.g.,
+
+```sh
+TELEGRAM_BOT_AUTH_TOKEN=itsme55667788
+```
+
+The alerting strategies use Telegram bot notification without further configuration. You can check the [pricealert
+yaml file](../../config/pricealert-tg.yaml) in the `config/` directory for an example.
+
+Run your bbgo.
+
+Open your Telegram app and search for your bot `bbgo_bot_711222333`
+
+Send `/auth` and then send your auth token to get authorized.
+
+Done! Your notifications will be routed to the telegram chat.
+
+## Authenticating yourself with OTP
+
+BBGO supports one-time password (OTP) authentication for Telegram, so you can authenticate yourself with a one-time password.
+
+When you run bbgo with the telegram token for the first time, it will generate an OTP key as a PNG file (named otp-xxxx.png) and also print it to the console output.
+
+You should store the OTP key in a safe place, such as 1Password.
+ +In order to save the OTP secret persistently, you should configure your BBGO with redis, simply add the following config to your `bbgo.yaml`: + +```yaml +persistence: + json: + directory: var/data + redis: + host: 127.0.0.1 + port: 6379 + db: 0 +``` diff --git a/doc/deployment/helm-chart.md b/doc/deployment/helm-chart.md new file mode 100644 index 0000000000..d9e1837226 --- /dev/null +++ b/doc/deployment/helm-chart.md @@ -0,0 +1,94 @@ +# Helm Chart + +## Requirement + +- redis (optional, if you need persistence) +- docker image (you can use the image from docker hub or build one by yourself) + +## Install + +If you need redis: + +```sh +helm repo add bitnami https://charts.bitnami.com/bitnami +helm install redis bitnami/redis +``` + +To get the dynamically generated redis password, you can use the following command: + +```sh +export REDIS_PASSWORD=$(kubectl get secret --namespace bbgo redis -o jsonpath="{.data.redis-password}" | base64 --decode) +``` + +Prepare your docker image locally (you can also use the docker image from docker hub): + +```sh +make docker DOCKER_TAG=1.16.0 +``` + +The docker tag version number is from the file [Chart.yaml](charts/bbgo/Chart.yaml) + +Choose your instance name: + +```sh +export INSTANCE=grid +``` + +Prepare your secret: + +```sh +kubectl create secret generic bbgo-$INSTANCE --from-env-file .env.local +``` + +Configure your config file, the chart defaults to read config/bbgo.yaml to create a configmap: + +```sh +cp config/grid.yaml bbgo-$INSTANCE.yaml +vim bbgo-$INSTANCE.yaml +``` + +Prepare your configmap: + +```sh +kubectl create configmap bbgo-$INSTANCE --from-file=bbgo.yaml=bbgo-$INSTANCE.yaml +``` + +Install chart with the preferred release name, the release name maps to the previous secret we just created, that +is, `bbgo-grid`: + +```sh +helm install bbgo-$INSTANCE ./charts/bbgo +``` + +By default, the helm chart uses configmap and dotenv secret by the release name, +if you have an existing configmap that is not named `bbgo-$INSTANCE`, you can specify the configmap via +the `existingConfigmap` option: + +```sh +helm install --set existingConfigmap=bbgo-$INSTANCE bbgo-$INSTANCE ./charts/bbgo +``` + +To use the latest version: + +```sh +helm install --set existingConfigmap=bbgo-$INSTANCE --set image.tag=latest bbgo-$INSTANCE ./charts/bbgo +``` + +Or, if you have custom values.yaml to override the default values: + +```sh +helm install --values deploy/my-bbgo-values.yaml bbgo-$INSTANCE ./charts/bbgo +``` + +To upgrade: + +```sh +helm upgrade bbgo-$INSTANCE ./charts/bbgo +helm upgrade --set image.tag=1.15.2 bbgo-$INSTANCE ./charts/bbgo +``` + +## Delete an installed chart + +```sh +helm delete bbgo-$INSTANCE +``` diff --git a/doc/deployment/systemd.md b/doc/deployment/systemd.md new file mode 100644 index 0000000000..87291be7ed --- /dev/null +++ b/doc/deployment/systemd.md @@ -0,0 +1,42 @@ +## Setting up Systemd + +If you want to deploy your bbgo binary to a linux system, you could use the systemd to launch your daemon. 
+ +To do this, add service file into the directory `/etc/systemd/system/bbgo.service` with the following content: + +```shell +[Unit] +After=network-online.target +Wants=network-online.target + +[Install] +WantedBy=multi-user.target + +[Service] +WorkingDirectory=/home/bbgo +# KillMode=process +ExecStart=/home/bbgo/bbgo run +User=bbgo +Restart=always +RestartSec=60 +``` + +Then, to load the service file, you need to run: + +```shell +systemctl daemon-reload +``` + +And then you can start your service by running enable and start: + +```shell +systemctl enable bbgo.service +systemctl start bbgo.service +``` + +To stop your service, you can run: + +```shell +systemctl stop bbgo.service +``` + diff --git a/doc/development/adding-new-exchange.md b/doc/development/adding-new-exchange.md new file mode 100644 index 0000000000..87cef0dfb8 --- /dev/null +++ b/doc/development/adding-new-exchange.md @@ -0,0 +1,229 @@ +# Adding New Exchange + +Open an issue and paste the following checklist to that issue. + +You should send multiple small pull request to implement them. + +**Please avoid sending a pull request with huge changes** + +**Important** -- for the underlying http API please use `requestgen` to generate the +requests. + +## Checklist + +Exchange Interface - the minimum requirement for spot trading + +- [ ] QueryMarkets +- [ ] QueryTickers +- [ ] QueryOpenOrders +- [ ] SubmitOrders +- [ ] CancelOrders +- [ ] NewStream + +Trading History Service Interface - (optional) used for syncing user trading data + +- [ ] QueryClosedOrders +- [ ] QueryTrades + +Order Query Service Interface - (optional) used for querying order status + +- [ ] QueryOrder + +Back-testing service - kline data is used for back-testing + +- [ ] QueryKLines + +Convert functions: + +- [ ] MarketData convert functions + - [ ] toGlobalMarket + - [ ] toGlobalTicker + - [ ] toGlobalKLine +- [ ] UserData convert functions + - [ ] toGlobalOrder + - [ ] toGlobalTrade + - [ ] toGlobalAccount + - [ ] toGlobalBalance + +Stream + +- [ ] UserDataStream + - [ ] Trade message parser + - [ ] Order message parser + - [ ] Account message parser + - [ ] Balance message parser +- [ ] MarketDataStream + - [ ] OrderBook message parser (or depth) + - [ ] KLine message parser (required for backtesting) + - [ ] Public trade message parser (optional) + - [ ] Ticker message parser (optional) +- [ ] ping/pong handling. +- [ ] heart-beat hanlding or keep-alive handling. +- [ ] handling reconnect + +Database + +- [ ] Add a new kline table for the exchange (this is required for back-testing) + - [ ] Add MySQL migration SQL + - [ ] Add SQLite migration SQL + +Exchange Factory + +- [ ] Add the exchange constructor to the exchange instance factory function. +- [ ] Add extended fields to the ExchangeSession struct. 
(optional) + +# Tools + +- Use a tool to convert JSON response to Go struct +- Use requestgen to generate request builders +- Use callbackgen to generate callbacks + +# Implementation + +Go to `pkg/types/exchange.go` and add your exchange type: + +``` +const ( + ExchangeMax = ExchangeName("max") + ExchangeBinance = ExchangeName("binance") + ExchangeFTX = ExchangeName("ftx") + ExchangeOKEx = ExchangeName("okex") + ExchangeKucoin = ExchangeName("kucoin") + ExchangeBacktest = ExchangeName("backtest") +) +``` + +Go to `pkg/cmd/cmdutil/exchange.go` and add your exchange to the factory + +``` +func NewExchangeStandard(n types.ExchangeName, key, secret, passphrase, subAccount string) (types.Exchange, error) { + switch n { + + case types.ExchangeFTX: + return ftx.NewExchange(key, secret, subAccount), nil + + case types.ExchangeBinance: + return binance.New(key, secret), nil + + case types.ExchangeOKEx: + return okex.New(key, secret, passphrase), nil + + // ... + } +} +``` + +## Using requestgen + +### Alias + +You can put the go:generate alias on the top of the file: + +``` +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE +``` + +Please note that the alias only works in the same file. + +### Defining Request Type Names + +Please define request type name in the following format: + +``` +{Verb}{Service}{Resource}Request +``` + +for example: + +``` +type GetMarginMarketsRequest struct { + client requestgen.APIClient +} +``` + +then you can attach the go:generate command on that type: + +``` + +//go:generate GetRequest -url "/api/v3/wallet/m/limits" -type GetMarginBorrowingLimitsRequest -responseType .MarginBorrowingLimitMap +``` + +## Un-marshalling Timestamps + +For millisecond timestamps, you can use `types.MillisecondTimestamp`, it will automatically convert the timestamp into +time.Time: + +``` +type MarginInterestRecord struct { + Currency string `json:"currency"` + CreatedAt types.MillisecondTimestamp `json:"created_at"` +} +``` + +## Un-marshalling numbers + +For number fields, especially floating numbers, please use `fixedpoint.Value`, it can parse int, float64, float64 in +string: + +``` +type A struct { + Amount fixedpoint.Value `json:"amount"` +} +``` + +## Test Market Data Stream + +### Test order book stream + +```shell +godotenv -f .env.local -- go run ./cmd/bbgo orderbook --config config/bbgo.yaml --session kucoin --symbol BTCUSDT +``` + +## Test User Data Stream + +```shell +godotenv -f .env.local -- go run ./cmd/bbgo --config config/bbgo.yaml userdatastream --session kucoin +``` + +## Test Restful Endpoints + +You can choose the session name to set-up for testing: + +```shell +export BBGO_SESSION=ftx +export BBGO_SESSION=kucoin +export BBGO_SESSION=binance +``` + +### Test user account balance + +```shell +godotenv -f .env.local -- go run ./cmd/bbgo balances --session $BBGO_SESSION +``` + +### Test order submit + +```shell +godotenv -f .env.local -- go run ./cmd/bbgo submit-order --session $BBGO_SESSION --symbol=BTCUSDT --side=buy --price=18000 --quantity=0.001 +``` + +### Test open orders query + +```shell +godotenv -f .env.local -- go run ./cmd/bbgo list-orders --session $BBGO_SESSION --symbol=BTCUSDT open +godotenv -f .env.local -- go run ./cmd/bbgo list-orders --session $BBGO_SESSION --symbol=BTCUSDT closed +``` + +### Test order status + +```shell +godotenv -f .env.local -- go run ./cmd/bbgo get-order --session $BBGO_SESSION --order-id ORDER_ID 
+```
+
+### Test order cancel
+
+```shell
+godotenv -f .env.local -- go run ./cmd/bbgo cancel-order --session $BBGO_SESSION --order-uuid 61c745c44592c200014abdcf
+```
diff --git a/doc/development/frontend.md b/doc/development/frontend.md
new file mode 100644
index 0000000000..22f0a54474
--- /dev/null
+++ b/doc/development/frontend.md
@@ -0,0 +1,40 @@
+### Setting up the frontend development environment
+
+You will need yarn to install the dependencies:
+
+```shell
+npm install -g yarn
+```
+
+You also need next.js:
+
+```shell
+npm install -g next@11
+```
+
+The frontend files are in the `frontend/` directory:
+
+```sh
+cd frontend
+```
+
+Run `yarn install` to install the dependencies:
+
+```shell
+yarn install
+```
+
+Build and compile the frontend static files:
+
+```shell
+yarn export
+```
+
+To start development, use:
+
+```shell
+yarn dev
+```
+
+
+
diff --git a/doc/development/indicator.md b/doc/development/indicator.md
new file mode 100644
index 0000000000..dfab564cef
--- /dev/null
+++ b/doc/development/indicator.md
@@ -0,0 +1,104 @@
+How To Use Builtin Indicators and Create New Indicators
+-------------------------------------------------------
+
+### Built-in Indicators
+A bbgo session already has several indicators defined inside it.
+We can refer to the live data without worrying about handling the market data subscription ourselves.
+To use the built-in ones, refer to the `StandardIndicatorSet` type:
+
+```go
+// defined in pkg/bbgo/session.go
+(*StandardIndicatorSet) BOLL(iw types.IntervalWindow, bandwidth float64) *indicator.BOLL
+(*StandardIndicatorSet) SMA(iw types.IntervalWindow) *indicator.SMA
+(*StandardIndicatorSet) EWMA(iw types.IntervalWindow) *indicator.EWMA
+(*StandardIndicatorSet) STOCH(iw types.IntervalWindow) *indicator.STOCH
+(*StandardIndicatorSet) VOLATILITY(iw types.IntervalWindow) *indicator.VOLATILITY
+```
+
+To get the `*StandardIndicatorSet` from an `ExchangeSession`, you just need to call:
+```go
+indicatorSet, ok := session.StandardIndicatorSet("BTCUSDT") // param: symbol
+```
+in your strategy's `Run` function.
+
+
+In your strategy's `Subscribe` function, subscribe to the `KLineChannel` with the interval of the indicator you want to query, and you should then be able to read the latest value from the indicator.
+
+However, what if you want to use an indicator that is not defined in `StandardIndicatorSet`? For example, the `AD` indicator defined in `pkg/indicator/ad.go`?
+
+Here's a simple example of what you would write in your strategy code:
+```go
+import (
+    "context"
+    "fmt"
+
+    "github.com/c9s/bbgo/pkg/bbgo"
+    "github.com/c9s/bbgo/pkg/types"
+    "github.com/c9s/bbgo/pkg/indicator"
+)
+
+type Strategy struct{ Symbol string }
+
+func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) {
+    session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"})
+}
+
+func (s *Strategy) Run(ctx context.Context, oe bbgo.OrderExecutor, session *bbgo.ExchangeSession) error {
+    // first we need to get the market data store (cached market data) from the exchange session
+    st, ok := session.MarketDataStore(s.Symbol)
+    if !ok {
+        ...
+        return err
+    }
+    // set up the time frame size
+    window := types.IntervalWindow{Window: 10, Interval: types.Interval1m}
+    // construct the AD indicator
+    AD := &indicator.AD{IntervalWindow: window}
+    // bind the indicator to the data store, so that our callback can be triggered
+    AD.Bind(st)
+    AD.OnUpdate(func(ad float64) {
+        fmt.Printf("now we've got ad: %f, total length: %d\n", ad, AD.Length())
+    })
+    return nil
+}
+```
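+
+For comparison, when the indicator you need is already provided by `StandardIndicatorSet`, the strategy body can be much shorter. The following is only a rough sketch (not code from this repository); it assumes the built-in `EWMA` factory shown above, a `Last()` accessor on the indicator, and the kline-closed callback on the session's market data stream:
+```go
+func (s *Strategy) Run(ctx context.Context, oe bbgo.OrderExecutor, session *bbgo.ExchangeSession) error {
+    indicatorSet, ok := session.StandardIndicatorSet(s.Symbol)
+    if !ok {
+        return fmt.Errorf("standard indicator set not found for symbol %s", s.Symbol)
+    }
+
+    // the interval here should match one of the kline intervals subscribed in Subscribe()
+    ema := indicatorSet.EWMA(types.IntervalWindow{Interval: types.Interval1m, Window: 7})
+
+    // read the most recently computed value whenever a kline closes
+    session.MarketDataStream.OnKLineClosed(func(kline types.KLine) {
+        fmt.Printf("kline closed: %s, ema(7): %f\n", kline.Interval, ema.Last())
+    })
+    return nil
+}
+```
+Either way, the indicator only receives data for the symbols and intervals you subscribed to in `Subscribe`.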
+ return err + } + // setup the time frame size + window := types.IntervalWindow{Window: 10, Interval: types.Interval1m} + // construct AD indicator + AD := &indicator.AD{IntervalWindow: window} + // bind indicator to the data store, so that our callback could be triggered + AD.Bind(st) + AD.OnUpdate(func (ad float64) { + fmt.Printf("now we've got ad: %f, total length: %d\n", ad, AD.Length()) + }) +} +``` + +#### To Contribute + +try to create new indicators in `pkg/indicator/` folder, and add compilation hint of go generator: +```go +// go:generate callbackgen -type StructName +type StructName struct { + ... + UpdateCallbacks []func(value float64) +} + +``` +And implement required interface methods: +```go +// custom function +func (inc *StructName) calculateAndUpdate(kLines []types.KLine) { + // calculation... + // assign the result to calculatedValue + inc.EmitUpdate(calculatedValue) // produce data, broadcast to the subscribers +} + +// custom function +func (inc *StructName) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + // filter on interval + inc.calculateAndUpdate(window) +} + +// required +func (inc *StructName) Bind(updator KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} +``` + +The `KLineWindowUpdater` interface is currently defined in `pkg/indicator/ewma.go` and may be moved out in the future. + +Once the implementation is done, run `go generate` to generate the callback functions of the indicator. +You should be able to implement your strategy and use the new indicator in the same way as `AD`. + +#### Generalize + +In order to provide indicator users a lower learning curve, we've designed the `types.Series` interface. We recommend indicator developers to also implement the `types.Series` interface to provide richer functionality on the computed result. To have deeper understanding how `types.Series` works, please refer to [doc/development/series.md](./series.md) diff --git a/doc/development/kucoin-cli.md b/doc/development/kucoin-cli.md new file mode 100644 index 0000000000..6f803f0b6d --- /dev/null +++ b/doc/development/kucoin-cli.md @@ -0,0 +1,22 @@ +# Kucoin command-line tool + +```shell +go run ./examples/kucoin accounts +go run ./examples/kucoin subaccounts +go run ./examples/kucoin symbols +go run ./examples/kucoin tickers +go run ./examples/kucoin tickers BTC-USDT +go run ./examples/kucoin orderbook BTC-USDT 20 +go run ./examples/kucoin orderbook BTC-USDT 100 + +go run ./examples/kucoin orders place --symbol LTC-USDT --price 50 --size 1 --order-type limit --side buy +go run ./examples/kucoin orders --symbol LTC-USDT --status active +go run ./examples/kucoin orders --symbol LTC-USDT --status done +go run ./examples/kucoin orders cancel --order-id 61b48b73b4de3e0001251382 + +# list history orders +go run ./examples/kucoin orders history --symbol BTC-USDT + +go run ./examples/kucoin fills --symbol LTC-USDT + +``` diff --git a/doc/development/migration.md b/doc/development/migration.md new file mode 100644 index 0000000000..50070dc511 --- /dev/null +++ b/doc/development/migration.md @@ -0,0 +1,63 @@ +# Migration + +### Adding new migration + +1. The project used rockerhopper for db migration. + https://github.com/c9s/rockhopper + + +2. 
Create migration files + + +You can use the util script to generate the migration files: + +``` +bash utils/generate-new-migration.sh add_pnl_column +``` + +Or, you can generate the migration files separately: + +```sh +rockhopper --config rockhopper_sqlite.yaml create --type sql add_pnl_column +rockhopper --config rockhopper_mysql.yaml create --type sql add_pnl_column +``` + + +Be sure to edit both sqlite3 and mysql migration files. ( [Sample](migrations/mysql/20210531234123_add_kline_taker_buy_columns.sql) ) + +To test the drivers, you have to update the rockhopper_mysql.yaml file to connect your database, +then do: + +```sh +rockhopper --config rockhopper_sqlite.yaml up +rockhopper --config rockhopper_mysql.yaml up +``` + +Then run the following command to compile the migration files into go files: + +```shell +make migrations +``` + +or + +```shell +rockhopper compile --config rockhopper_mysql.yaml --output pkg/migrations/mysql +rockhopper compile --config rockhopper_sqlite.yaml --output pkg/migrations/sqlite3 +git add -v pkg/migrations && git commit -m "compile and update migration package" pkg/migrations || true +``` + + +If you want to override the DSN and the Driver defined in the YAML config file, you can add some env vars in your dotenv file like this: + +```shell +ROCKHOPPER_DRIVER=mysql +ROCKHOPPER_DIALECT=mysql +ROCKHOPPER_DSN="root:123123@unix(/opt/local/var/run/mysql57/mysqld.sock)/bbgo" +``` + +And then, run: + +```shell +dotenv -f .env.local -- rockhopper --config rockhopper_mysql.yaml up +``` diff --git a/doc/development/release-process.md b/doc/development/release-process.md new file mode 100644 index 0000000000..f92152dd16 --- /dev/null +++ b/doc/development/release-process.md @@ -0,0 +1,44 @@ +# Release Process + +## 1. Run the release test script + +```shell +bash scripts/release-test.sh +``` + +## 2. Prepare the release note + +You need to prepare the release note for your next release version. + +The release notes are placed in the `doc/release` directory. + +If your next version is `v1.20.2`, then you should put the release note in the following file: + +``` +doc/release/v1.20.2.md +``` + +Run changelog script to generate a changelog template: + +```sh +bash utils/changelog.sh > doc/release/v1.20.2.md +``` + +Edit your changelog. + +## 3. Make the release + +Run the following command to create the release: + +```sh +make version VERSION=v1.20.2 +``` + +The above command wilL: + +- Update and compile the migration scripts into go files. +- Bump the version name in the go code. +- Run git tag to create the tag. +- Run git push to push the created tag. + +You can go to to modify the changelog diff --git a/doc/development/series.md b/doc/development/series.md new file mode 100644 index 0000000000..08eec47112 --- /dev/null +++ b/doc/development/series.md @@ -0,0 +1,43 @@ +Indicator Interface +----------------------------------- + +In bbgo, we've added several interfaces to standardize the indicator protocol. +The new interfaces will allow strategy developers switching similar indicators without checking the code. +Signal contributors or indicator developers were also able to be benefit from the existing interface functions, such as `Add`, `Mul`, `Minus`, and `Div`, without rebuilding the wheels. + +The series interface in bbgo borrows the concept of `series` type in pinescript that allow us to query data in time-based reverse order (data that created later will be the former object in series). 
Right now, based on the return type, there are two interfaces defined in [pkg/types/indicator.go](../../pkg/types/indicator.go):
+
+```go
+type Series interface {
+	Last() float64       // newest element
+	Index(i int) float64 // i >= 0, query float64 data in reverse order using i as index
+	Length() int         // length of data piped in array
+}
+```
+
+and
+
+```go
+type BoolSeries interface {
+	Last() bool       // newest element
+	Index(i int) bool // i >= 0, query bool data in reverse order using i as index
+	Length() int      // length of data piped in array
+}
+```
+
+Series is used almost everywhere in indicators to return the calculated numeric results, while the use of BoolSeries is quite limited. At the moment we only use BoolSeries to check whether some condition is fulfilled at a given point in time. For example, in the `CrossOver` and `CrossUnder` functions, if `Last()` returns true, a cross event may have happened on the curves at that moment.
+
+#### Expected Implementation
+
+The calculation can either be done at invocation time (lazy initialization, for example) or pre-calculated every time an event happens (e.g. on kline close). If it's done at invocation time and the computation is CPU intensive, it's better to cache the result somewhere inside the struct. Also remember to always implement the Series interface on the indicator's struct pointer, so that access to the indicator always points to the same memory space.
+
+#### Compile Time Check
+
+We recommend developers add the following line inside the indicator source:
+
+```go
+var _ types.Series = &INDICATOR_TYPENAME{}
+// Change INDICATOR_TYPENAME to the struct name that implements types.Series
+```
+
+If any method of the interface is not implemented, this will produce a compile-time error message.
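+For reference, here is a minimal sketch of a struct that satisfies `types.Series`. The package name, the `MyIndicator` struct and its `Values` slice are hypothetical; only the interface itself comes from `pkg/types/indicator.go`:
+
+```go
+package myindicator
+
+import "github.com/c9s/bbgo/pkg/types"
+
+// MyIndicator is a hypothetical indicator that stores its computed values in insertion order.
+type MyIndicator struct {
+	Values []float64
+}
+
+// Last returns the newest element.
+func (inc *MyIndicator) Last() float64 {
+	if len(inc.Values) == 0 {
+		return 0.0
+	}
+	return inc.Values[len(inc.Values)-1]
+}
+
+// Index queries data in reverse order, so Index(0) is the same as Last().
+func (inc *MyIndicator) Index(i int) float64 {
+	if i < 0 || i >= len(inc.Values) {
+		return 0.0
+	}
+	return inc.Values[len(inc.Values)-1-i]
+}
+
+// Length returns the number of values collected so far.
+func (inc *MyIndicator) Length() int {
+	return len(inc.Values)
+}
+
+// compile-time check, done on the pointer type as suggested above
+var _ types.Series = &MyIndicator{}
+```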
diff --git a/doc/release/v1.19.3.md b/doc/release/v1.19.3.md new file mode 100644 index 0000000000..a42ef02eb6 --- /dev/null +++ b/doc/release/v1.19.3.md @@ -0,0 +1,4 @@ +## Fixes + +- Added error check for sync-from-time and first-sync-time + diff --git a/doc/release/v1.19.4.md b/doc/release/v1.19.4.md new file mode 100644 index 0000000000..b48e1fdb88 --- /dev/null +++ b/doc/release/v1.19.4.md @@ -0,0 +1,3 @@ +## Fixes + +- Fixed market info cache for backtest diff --git a/doc/release/v1.20.0.md b/doc/release/v1.20.0.md new file mode 100644 index 0000000000..7609a61a54 --- /dev/null +++ b/doc/release/v1.20.0.md @@ -0,0 +1,7 @@ +## Fixes + +- ftx: fixed FTX OnKLineClosed event + +## Migrations + +- add `is_futures` fields to orders and trades table diff --git a/doc/release/v1.21.0.md b/doc/release/v1.21.0.md new file mode 100644 index 0000000000..24d6da67eb --- /dev/null +++ b/doc/release/v1.21.0.md @@ -0,0 +1,23 @@ + +## Fixes + +- Fixed back-test concurrent read/write map issue +- Fixed back-test go routine initialization +- Fixed trade notification format - remove trade trailing zero digits +- Fixed dockerfile +- Fixed back-test kline sync issue + +## Refactoring + +- Refactored and fixed depth buffer + +## Strategy + +- Added rebalance strategy + +## Features + +- Added prometheus metrics support +- Added kucoin exchange support +- Added bookTicker support to binance, okex, ftx +- Integrated net asset value recording table in db diff --git a/doc/release/v1.21.1.md b/doc/release/v1.21.1.md new file mode 100644 index 0000000000..03474035dd --- /dev/null +++ b/doc/release/v1.21.1.md @@ -0,0 +1,13 @@ +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.21.0...v1.21.1) + +## Fixes + +- Fixed pendingRemoval lock +- Fixed trailing zero formatting +- Fixed binance margin order cancel + +## Features + +- [#409](https://github.com/c9s/bbgo/pull/409): binance: add futures parser +- [#408](https://github.com/c9s/bbgo/pull/408): feature: add metricsLastUpdateTimeBalance metrics +- [#407](https://github.com/c9s/bbgo/pull/407): feature: update helm chart for metrics and webserver option diff --git a/doc/release/v1.21.2.md b/doc/release/v1.21.2.md new file mode 100644 index 0000000000..24aef3f811 --- /dev/null +++ b/doc/release/v1.21.2.md @@ -0,0 +1,3 @@ +## Fixes + +- Fixed kucoin api context issue diff --git a/doc/release/v1.21.3.md b/doc/release/v1.21.3.md new file mode 100644 index 0000000000..f3ef542a25 --- /dev/null +++ b/doc/release/v1.21.3.md @@ -0,0 +1,6 @@ +## Fixes + +- Fixed kucoin orderTime parsing (it's nanosecond timestamp) +- Fixed kucoin order cancel issue (the missing order ID) +- Fixed kucoin order status "canceled" handling +- Fixed binance stream ping worker and listen key keep alive diff --git a/doc/release/v1.21.4.md b/doc/release/v1.21.4.md new file mode 100644 index 0000000000..dc2e5d0bf8 --- /dev/null +++ b/doc/release/v1.21.4.md @@ -0,0 +1,4 @@ +## Features + +- Added binance.us support by env var `BINANCE_US=1` + diff --git a/doc/release/v1.22.0.md b/doc/release/v1.22.0.md new file mode 100644 index 0000000000..1840e0f7e1 --- /dev/null +++ b/doc/release/v1.22.0.md @@ -0,0 +1,8 @@ +## Features + +- Added binance futures stream positions handler + +## Refactoring + +- Refactored 80% kucoin api services with requestgen + diff --git a/doc/release/v1.22.1.md b/doc/release/v1.22.1.md new file mode 100644 index 0000000000..7c2a03d26d --- /dev/null +++ b/doc/release/v1.22.1.md @@ -0,0 +1,9 @@ +## Fixes + +- Fixed kucoin trade convert +- Fixed trade, order data sync issue +- 
Fixed binance withdrawal rejected record failure issue + +## Minor + +- Print klines only when `DEBUG_KLINE` is enabled. diff --git a/doc/release/v1.22.2.md b/doc/release/v1.22.2.md new file mode 100644 index 0000000000..e17d0d5316 --- /dev/null +++ b/doc/release/v1.22.2.md @@ -0,0 +1,7 @@ +## Fixes + +- Fixed kucoin query history order for trading data syncing. + +## Minor + +- Avoid syncing canceled unfilled orders into db. diff --git a/doc/release/v1.22.3.md b/doc/release/v1.22.3.md new file mode 100644 index 0000000000..4431ab5fb1 --- /dev/null +++ b/doc/release/v1.22.3.md @@ -0,0 +1,4 @@ +## Fixes + +- Fixed empty start time sync issue + diff --git a/doc/release/v1.23.0.md b/doc/release/v1.23.0.md new file mode 100644 index 0000000000..2168ffc758 --- /dev/null +++ b/doc/release/v1.23.0.md @@ -0,0 +1,10 @@ +## Minor + +- Improved bollpp strategy. +- Refactored websocket stream. + +## Fixes + +- Fixed limit maker order type in back-testing +- Added market info cache expiry time +- Fixed MAX exchange authenticated event parsing diff --git a/doc/release/v1.24.0.md b/doc/release/v1.24.0.md new file mode 100644 index 0000000000..273dbc30c5 --- /dev/null +++ b/doc/release/v1.24.0.md @@ -0,0 +1,14 @@ +## Minor + +- Adjust minimal quantity and minimal notional threshold +- Improved position fee display for telegram + +## Strategy + +- Added bollmaker strategy + +## Fixes + +- Added error rate limiter for xmaker +- Fixed slack rate limit by applying rate limiter to 1 message per second +- Fixed quantity truncation and add check for min quantity n min notional for xmaker diff --git a/doc/release/v1.25.0.md b/doc/release/v1.25.0.md new file mode 100644 index 0000000000..f0b78abbb5 --- /dev/null +++ b/doc/release/v1.25.0.md @@ -0,0 +1,3 @@ +## Minor + +- Finalizing bollmaker strategy diff --git a/doc/release/v1.25.1.md b/doc/release/v1.25.1.md new file mode 100644 index 0000000000..8cf8cbdfe4 --- /dev/null +++ b/doc/release/v1.25.1.md @@ -0,0 +1,7 @@ +## Fixes + +- binance: fix order cancel client order id usage +- fixed pendingRemoval check +- fixed active book order removal +- strategy/support: fixed triggerEMA pointer panic +- strategy/xmaker: fixed error rate limiter, keep and check rate reservation token diff --git a/doc/release/v1.25.2.md b/doc/release/v1.25.2.md new file mode 100644 index 0000000000..cf7eed0be8 --- /dev/null +++ b/doc/release/v1.25.2.md @@ -0,0 +1,4 @@ +## Fixes + +- binance: fix listen key worker +- binance: adjust websocket read timeout diff --git a/doc/release/v1.25.3.md b/doc/release/v1.25.3.md new file mode 100644 index 0000000000..1b748821fa --- /dev/null +++ b/doc/release/v1.25.3.md @@ -0,0 +1,6 @@ +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.25.2...v1.25.3) + +- [#424](https://github.com/c9s/bbgo/pull/424): fix: [binance] add order rate limiter +- [#432](https://github.com/c9s/bbgo/pull/432): ftx: Support `LIMIT_MAKER` and `IOC_LIMIT` order type +- types: fixed rbt orderbook +- xmaker: add price volume heartbeat protection diff --git a/doc/release/v1.25.4.md b/doc/release/v1.25.4.md new file mode 100644 index 0000000000..13bda6fa23 --- /dev/null +++ b/doc/release/v1.25.4.md @@ -0,0 +1,4 @@ +## Fixes + +- xmaker: add price heart beat checker +- types: fix order book copy diff --git a/doc/release/v1.26.0.md b/doc/release/v1.26.0.md new file mode 100644 index 0000000000..0654d7c8d1 --- /dev/null +++ b/doc/release/v1.26.0.md @@ -0,0 +1,9 @@ +## Features + +- improved telegram interaction and authentication process +- added interaction for bollmaker strategy 
+ +## Fixes + +- fixed local active book graceful order cancel +- bollmaker: fixed uptrendSkew and downtrendSkew parameters setting on bollmaker strategy diff --git a/doc/release/v1.26.1.md b/doc/release/v1.26.1.md new file mode 100644 index 0000000000..c488a24ad2 --- /dev/null +++ b/doc/release/v1.26.1.md @@ -0,0 +1,8 @@ +## Fixes + +- fixed some interaction issue +- fixed telegram token missing issue +- fixed telegram session persistence +- optimized LocalActiveOrderBook for back-testing speed +- bollmaker: collect trades before we shutdown +- bollmaker: adjust quantity to met the min notional condition before we submit diff --git a/doc/release/v1.26.3.md b/doc/release/v1.26.3.md new file mode 100644 index 0000000000..7a28ac1a67 --- /dev/null +++ b/doc/release/v1.26.3.md @@ -0,0 +1,4 @@ +## Fixes + +- fixed kucoin klines query ordering (from descending to ascending) +- fixed grid order cancel context diff --git a/doc/release/v1.28.0.md b/doc/release/v1.28.0.md new file mode 100644 index 0000000000..1604df9b24 --- /dev/null +++ b/doc/release/v1.28.0.md @@ -0,0 +1,26 @@ +## Fixes + +- Fixed back-test time format parsing. +- Fixed and improved the MAX exchange closed order query. +- Fixed MAX closed order query rate limiter. +- Fixed Binance closed orders query +- Fixed KuCoin closed orders query +- Fixed the build command. +- Fixed the telegram session restore. +- Fixed the telegram interaction menu layout for long lists. +- Fixed persistence error by fallback to memory persistence if redis not found. PR #442 +- Fixed the back-test last price not updated issue. PR #431 +- Fixed max kline parsing +- Fixed dynamic field injection +- Fixed cmd config flags + +## Features + +- Added sync config support. +- Added slack interaction support. +- Added position/profit db record support +- Added decimal support (with dnum build tag) + +## Minor + +- Added Binance futures broker ID diff --git a/doc/release/v1.29.0.md b/doc/release/v1.29.0.md new file mode 100644 index 0000000000..9282090d29 --- /dev/null +++ b/doc/release/v1.29.0.md @@ -0,0 +1,16 @@ +## Fixes + +- Fixed kucoin order type conversion for limit maker. +- Fixed kucoin rate limit. +- Fixed persistence selector and injection. +- Fixed rebalance streategy for back-testing. + +## Features + +- Added IM interaction (emergency stop and suspend command). +- Added FTX market trade stream. +- Added Binance market trade stream. +- Added positions, profit table. +- Added Binance paper trade endpoint support. 
+- Added RSI indicator + diff --git a/doc/release/v1.30.0.md b/doc/release/v1.30.0.md new file mode 100644 index 0000000000..f29023423b --- /dev/null +++ b/doc/release/v1.30.0.md @@ -0,0 +1,10 @@ +## Fixes + +- Fixed backtest source session configuration +- Fixed binance margin closed order query condition for range > 24 hours +- Fixed support strategy conditions and trailing stop order submission + +## Features + +- Added basic gRPC server skeleton +- Added glass node API diff --git a/doc/release/v1.30.1.md b/doc/release/v1.30.1.md new file mode 100644 index 0000000000..b8e44189cd --- /dev/null +++ b/doc/release/v1.30.1.md @@ -0,0 +1,4 @@ +## Fixes + +- Fixed kucoin query trades and query orders api +- Fixed gRPC start time/end time condition for queryKLines PI diff --git a/doc/release/v1.30.2.md b/doc/release/v1.30.2.md new file mode 100644 index 0000000000..200fda16d4 --- /dev/null +++ b/doc/release/v1.30.2.md @@ -0,0 +1,5 @@ +## Fixes + +- bollmaker: fixed timeInForce issue for binance limit maker order +- grpc: fixed user data subscription + diff --git a/doc/release/v1.30.3.md b/doc/release/v1.30.3.md new file mode 100644 index 0000000000..3349c4190b --- /dev/null +++ b/doc/release/v1.30.3.md @@ -0,0 +1,4 @@ +## Fixes + +- Fixed otp auth store -- store key url instead of just secret +- Improved auth interaction messages and added more checks diff --git a/doc/release/v1.31.0.md b/doc/release/v1.31.0.md new file mode 100644 index 0000000000..fbf13b1c7e --- /dev/null +++ b/doc/release/v1.31.0.md @@ -0,0 +1,23 @@ +## Fixes + +- Fixed and improved the trade sync. +- Fixed MAX order history sync with the new order history api. + +## Features + +- Added more sync options for deposit history, withdraw history and reward history. +- Added strategy controller for suspend, resume a strategy. thanks @andycheng123! +- Added more indicators. thanks @zenixls2! + +## Strategy + +- Added autoborrow strategy (this is currently for binance only) +- Added factorzoo strategy. thanks @austin362667! +- Added elliott wave oscillator strategy. thhanks @zenixls2! + +## Internal + +- Upgraded rockhopper migration library for the new APIs. +- Added mutex lock to the account object. +- Refactored and rewrote the MAX API with requestgen. +- Added grpc python client, thanks @narumiruna! diff --git a/doc/release/v1.31.1.md b/doc/release/v1.31.1.md new file mode 100644 index 0000000000..064fdd4cf7 --- /dev/null +++ b/doc/release/v1.31.1.md @@ -0,0 +1,4 @@ +## Fixes + +- Fixed margin order/trade sync. +- Fixed initial bollinger indicator band width setup. 
diff --git a/doc/release/v1.31.2.md b/doc/release/v1.31.2.md new file mode 100644 index 0000000000..5768380c3d --- /dev/null +++ b/doc/release/v1.31.2.md @@ -0,0 +1,3 @@ +## Fixes + +- Fixed sync config check diff --git a/doc/release/v1.31.3.md b/doc/release/v1.31.3.md new file mode 100644 index 0000000000..48b4dab012 --- /dev/null +++ b/doc/release/v1.31.3.md @@ -0,0 +1,3 @@ +## Fixes + +- Fixed sync pointer check diff --git a/doc/release/v1.31.4.md b/doc/release/v1.31.4.md new file mode 100644 index 0000000000..b0e4f4a6bb --- /dev/null +++ b/doc/release/v1.31.4.md @@ -0,0 +1,10 @@ +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.31.3...main) + +## Improves + + - [#582](https://github.com/c9s/bbgo/pull/582): improve: backtest: rename backtest.account to backtest.accounts + +## Fixes + + - [#580](https://github.com/c9s/bbgo/pull/580): fix: fix okex rate limit + - [#579](https://github.com/c9s/bbgo/pull/579): fix: fix kucoin rate limit diff --git a/doc/release/v1.32.0.md b/doc/release/v1.32.0.md new file mode 100644 index 0000000000..c2b19719a8 --- /dev/null +++ b/doc/release/v1.32.0.md @@ -0,0 +1,20 @@ +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.31.4...main) + +## Fixes +- [#590](https://github.com/c9s/bbgo/pull/590): fix: fix net asset field +- [#589](https://github.com/c9s/bbgo/pull/589): fix: use net asset to calculate inUSD +- [#588](https://github.com/c9s/bbgo/pull/588): fix: add interest and fix net asset column +- [#584](https://github.com/c9s/bbgo/pull/584): feature: record nav values into db + +## Features +- [#587](https://github.com/c9s/bbgo/pull/587): feature: add grpc port and config in helm chart +- [#586](https://github.com/c9s/bbgo/pull/586): add grpc value in helm chart +- [#581](https://github.com/c9s/bbgo/pull/581): feature: add --sync-exchange option to override backtest sync exchanges + +## Tests +- [#585](https://github.com/c9s/bbgo/pull/585): indicator: add test case for boll + + +## Doc +- [#583](https://github.com/c9s/bbgo/pull/583): doc: add frontend development setup doc + diff --git a/doc/release/v1.33.0.md b/doc/release/v1.33.0.md new file mode 100644 index 0000000000..ab45b0ef3d --- /dev/null +++ b/doc/release/v1.33.0.md @@ -0,0 +1,106 @@ +## Fixes + +- backtest: fixed duplicated order update trigger for market order filled status. +- backtest: fixed the kline sync and rewrote the back-filling logic. (faster sync) +- sync: fixed the binance withdraw history sync with the new API. (implemented with requestgen) +- fixed profits table: data too long for profits column 'symbol' error. +- fixed binance bookTicker typename. +- fixed helm chart grpc binding string. +- fixed duplicated kline sync issue and add unique index for kline tables. +- interact: fixed missing make(). +- fixed incorrect binance futures position parsing. +- fixed SMA indicator. +- fixed and improve the sqlite support for back-testing. + +## Features + +- added more binance margin API support +- added binance loan history, repay history, interest history sync. +- added CoinMarketCap API. +- backtest: added web-based backtest report with kline chart and position information. +- backtest: added strategy parameter optimizer (grid search). +- indicator: added cci indicator +- improved and redesigned the strategy persistence API. +- indicator: added emv indicator + +## New Strategies + +- added `supertrend` strategy. +- added `pivotshort` strategy. +- added `dca` strategy. +- added `fmaker` strategy. +- added `autoborrow` strategy. +- added `wall` strategy. 
+ +## Strategy Updates + +- `bollmaker`: added dynamic spread support. +- `bollmaker`: added exchange fee to position. +- `ewo`: fixed entry backtest. +- `rebalance`: use limit orders + +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.32.0...main) + + - [#682](https://github.com/c9s/bbgo/pull/682): fix: fix duplicated filled order update callbacks in backtest + - [#681](https://github.com/c9s/bbgo/pull/681): Indicator/supertrend + - [#653](https://github.com/c9s/bbgo/pull/653): strategy: add supertrend strategy + - [#678](https://github.com/c9s/bbgo/pull/678): interact: fix missing make() + - [#638](https://github.com/c9s/bbgo/pull/638): strategy: add fmaker + - [#679](https://github.com/c9s/bbgo/pull/679): fix: close / rollback queries/transactions on error + - [#676](https://github.com/c9s/bbgo/pull/676): fix: rewrite kline verifying function + - [#674](https://github.com/c9s/bbgo/pull/674): rename LocalActiveOrderBook to just ActiveOrderBook + - [#672](https://github.com/c9s/bbgo/pull/672): fix and simplify LocalActiveOrderBook + - [#671](https://github.com/c9s/bbgo/pull/671): Fix futures position incorrect + - [#670](https://github.com/c9s/bbgo/pull/670): Improve backtest report ui + - [#669](https://github.com/c9s/bbgo/pull/669): fix: fix partial kline sync + - [#667](https://github.com/c9s/bbgo/pull/667): strategy: pivotshort refactor + - [#660](https://github.com/c9s/bbgo/pull/660): pivotshort: clean up strategy + - [#666](https://github.com/c9s/bbgo/pull/666): improve: apply default exchange fee rate + - [#664](https://github.com/c9s/bbgo/pull/664): fix: use the correct id for state loading + - [#663](https://github.com/c9s/bbgo/pull/663): test: add more test on Test_loadPersistenceFields + - [#661](https://github.com/c9s/bbgo/pull/661): fix: drop IsZero + - [#656](https://github.com/c9s/bbgo/pull/656): refactor: drop unused function + - [#657](https://github.com/c9s/bbgo/pull/657): fix: bollmaker: fix short position order + - [#655](https://github.com/c9s/bbgo/pull/655): fix: improve and fix kline sync + - [#654](https://github.com/c9s/bbgo/pull/654): fix: change from local timezone to UTC when do kline synchronization + - [#652](https://github.com/c9s/bbgo/pull/652): refactor/fix: withdraw sync + - [#650](https://github.com/c9s/bbgo/pull/650): Fix: Persistence Reflect IsZero + - [#649](https://github.com/c9s/bbgo/pull/649): fix: max: fix QueryAccount for margin wallet + - [#648](https://github.com/c9s/bbgo/pull/648): feature: binance margin history sync support + - [#644](https://github.com/c9s/bbgo/pull/644): feature: sync binance margin history into db + - [#645](https://github.com/c9s/bbgo/pull/645): feature: add emv indicator, fix: sma + - [#633](https://github.com/c9s/bbgo/pull/633): Fix/ewo entry, backtest + - [#637](https://github.com/c9s/bbgo/pull/637): feature: binance margin loan/interest/repay history + - [#636](https://github.com/c9s/bbgo/pull/636): fix: max: fix trades/orders parsing + - [#635](https://github.com/c9s/bbgo/pull/635): feature: max margin wallet + - [#617](https://github.com/c9s/bbgo/pull/617): feature: bollmaker dynamic spread + - [#634](https://github.com/c9s/bbgo/pull/634): rebalance: place limit orders + - [#632](https://github.com/c9s/bbgo/pull/632): fix: setup-bollgrid.sh: respect exchange name from command line argument + - [#630](https://github.com/c9s/bbgo/pull/630): fix: fix duplicated kline sync issue and add unique index for kline tables + - [#628](https://github.com/c9s/bbgo/pull/628): fix: fix summary report intervals + - 
[#627](https://github.com/c9s/bbgo/pull/627): feature: add grid optimizer + - [#626](https://github.com/c9s/bbgo/pull/626): use types.Interval instead of string + - [#625](https://github.com/c9s/bbgo/pull/625): feature: web-based back-test report - add mantine UI framework + - [#622](https://github.com/c9s/bbgo/pull/622): fix: back-test report: load position from the manifest + - [#605](https://github.com/c9s/bbgo/pull/605): feature: add web-based back-test report + - [#620](https://github.com/c9s/bbgo/pull/620): fix: sqlite3 compilation + - [#619](https://github.com/c9s/bbgo/pull/619): fix dockerfile. + - [#618](https://github.com/c9s/bbgo/pull/618): fix: golang version in Dockerfile + - [#610](https://github.com/c9s/bbgo/pull/610): feature: SLTP from bookticker. fix: bookTicker typename, depth buffer… + - [#615](https://github.com/c9s/bbgo/pull/615): python: parse balance borrowed + - [#614](https://github.com/c9s/bbgo/pull/614): ftx: Let FTX support 4hr interval + - [#592](https://github.com/c9s/bbgo/pull/592): feature: add CoinMarketCap API + - [#613](https://github.com/c9s/bbgo/pull/613): bollmaker: set exchange fee to position + - [#609](https://github.com/c9s/bbgo/pull/609): Fix error: Data too long for profits column 'symbol' + - [#612](https://github.com/c9s/bbgo/pull/612): python sdk: use decimal. + - [#611](https://github.com/c9s/bbgo/pull/611): feature: add wall strategy + - [#603](https://github.com/c9s/bbgo/pull/603): feature: backtest report - #2 state recorder + - [#599](https://github.com/c9s/bbgo/pull/599): feature: add cci indicator + - [#601](https://github.com/c9s/bbgo/pull/601): feature: backtest report + - [#600](https://github.com/c9s/bbgo/pull/600): fix helm chart grpc binding string + - [#562](https://github.com/c9s/bbgo/pull/562): add Series documentation + - [#598](https://github.com/c9s/bbgo/pull/598): fix: binance data sync + - [#593](https://github.com/c9s/bbgo/pull/593): glassnode: simplify NewAuthenticatedRequest + - [#597](https://github.com/c9s/bbgo/pull/597): strategy: update bollmaker to support new strategy controller + - [#575](https://github.com/c9s/bbgo/pull/575): feature: binance: add get deposit address request API + - [#596](https://github.com/c9s/bbgo/pull/596): improve persistence api diff --git a/doc/release/v1.33.1.md b/doc/release/v1.33.1.md new file mode 100644 index 0000000000..fad27137f4 --- /dev/null +++ b/doc/release/v1.33.1.md @@ -0,0 +1,14 @@ +## Fixes + +- fixed sync since time field check (nil pointer error). +- fixed reflect insert for sqlite. +- fixed drift window indicator. + +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.33.0...main) + + - [#691](https://github.com/c9s/bbgo/pull/691): fix: fix sync since time field check + - [#690](https://github.com/c9s/bbgo/pull/690): config: add dca config + - [#685](https://github.com/c9s/bbgo/pull/685): ci: add node workflow + - [#689](https://github.com/c9s/bbgo/pull/689): fix: fix reflect insert (remove gid field) + - [#688](https://github.com/c9s/bbgo/pull/688): fix: drift window in factorzoo, order_execution print order, refactor… + - [#687](https://github.com/c9s/bbgo/pull/687): fix: check for div zero in drift indicator diff --git a/doc/release/v1.33.2.md b/doc/release/v1.33.2.md new file mode 100644 index 0000000000..0cb536c59b --- /dev/null +++ b/doc/release/v1.33.2.md @@ -0,0 +1,12 @@ +## Fixes + +- fixed net profit for zero fee. +- fixed and rewrite binance deposit history sync. +- refactored and fixed the deposity batch query. 
+- fixed the pnl command and add warning logs. + +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.33.1...main) + + - [#693](https://github.com/c9s/bbgo/pull/693): fix: fix and rewrite binance deposit history sync + - [#695](https://github.com/c9s/bbgo/pull/695): fix: calcualte fee in quote only when fee is not zero + - [#692](https://github.com/c9s/bbgo/pull/692): fix: fix pnl command calculation and add warning logs diff --git a/doc/release/v1.33.3.md b/doc/release/v1.33.3.md new file mode 100644 index 0000000000..d20dea2b8a --- /dev/null +++ b/doc/release/v1.33.3.md @@ -0,0 +1,12 @@ +## Fixes + +- Fixed MAX v3 order cancel api. +- Fixed active order book order cancel wait time. +- Fixed and refined pivotshort position close code. + +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.33.2...main) + + - [#699](https://github.com/c9s/bbgo/pull/699): pivotshort: add roiMinTakeProfitPercentage option and cumulatedVolume… + - [#697](https://github.com/c9s/bbgo/pull/697): strategy: remove redundant code + - [#698](https://github.com/c9s/bbgo/pull/698): strategy pivotshort: refactor and add stop EMA + - [#677](https://github.com/c9s/bbgo/pull/677): strategy: pivotshort: improve short position trigger diff --git a/doc/release/v1.33.4.md b/doc/release/v1.33.4.md new file mode 100644 index 0000000000..20775ba679 --- /dev/null +++ b/doc/release/v1.33.4.md @@ -0,0 +1,23 @@ +## Fixes + +- Fixed fixedpoint percentage boundary check. +- Fixed syncing goroutine leak +- Removed kline debug log +- Fixed telegram notifier args filtering. +- Fixed message format args filtering. +- Fixed RecordPosition profit pointer checking. + +## Strategy Updates + +- pivotshort: add bounce short support. + +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.33.3...main) + + - [#712](https://github.com/c9s/bbgo/pull/712): fix: fixedpoint percentage bound check + - [#710](https://github.com/c9s/bbgo/pull/710): strategy: pivot: add bounce short + - [#708](https://github.com/c9s/bbgo/pull/708): format js code by prettier + - [#706](https://github.com/c9s/bbgo/pull/706): add prettier to format the typescript code + - [#703](https://github.com/c9s/bbgo/pull/703): fix: syncing goroutine leak + - [#705](https://github.com/c9s/bbgo/pull/705): add codecoverage badge + - [#704](https://github.com/c9s/bbgo/pull/704): ci: codecoverage + - [#700](https://github.com/c9s/bbgo/pull/700): pivotshort: add breakLow.bounceRatio option diff --git a/doc/release/v1.34.0.md b/doc/release/v1.34.0.md new file mode 100644 index 0000000000..962ea8ac0c --- /dev/null +++ b/doc/release/v1.34.0.md @@ -0,0 +1,43 @@ +## Fixes + +- Fixed futures kline data and ticker data. +- Fixed frontend data sync blocking issue. +- Fixed xmaker bollinger band value checking. + +## Improvments + +- Sharing backtest report kline data. +- Upgraded frontend material UI from v4 to v5.8.3. +- Added sync session symbol support. + +## Features + +- Added bool parameter support for optimizer. +- Added ALMA indicator. +- Added frontend sync button. +- Added Ehler's super smoother filter. +- Added frontend grid stats panel. + +## Strategies + +- Added EWO histogram +- Refactored and updated rebalance strategy. 
+ + +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.33.4...main) + + - [#723](https://github.com/c9s/bbgo/pull/723): feature: add Ehler's Super smoother filter + - [#724](https://github.com/c9s/bbgo/pull/724): Grid panel draft + - [#726](https://github.com/c9s/bbgo/pull/726): rebalance: replace float slice by string-value map + - [#725](https://github.com/c9s/bbgo/pull/725): rebalance: simplify code + - [#713](https://github.com/c9s/bbgo/pull/713): improve: share klines tsv + - [#722](https://github.com/c9s/bbgo/pull/722): fix futures mode not use futures kline data. + - [#719](https://github.com/c9s/bbgo/pull/719): optimizer: bool type parameter + - [#718](https://github.com/c9s/bbgo/pull/718): fix: sync api guard condition + - [#716](https://github.com/c9s/bbgo/pull/716): fix: frontend: do not block whole page while syncing + - [#707](https://github.com/c9s/bbgo/pull/707): feature: add basic implementation of alma indicator + - [#717](https://github.com/c9s/bbgo/pull/717): strategy: fix xmaker bollinger band value checking and value updating + - [#715](https://github.com/c9s/bbgo/pull/715): feature: on demand sync button + - [#714](https://github.com/c9s/bbgo/pull/714): improve: support specifying session in the sync symbol + - [#647](https://github.com/c9s/bbgo/pull/647): strategy: ewo: add histogram + - [#711](https://github.com/c9s/bbgo/pull/711): upgrade material UI from v4 to v5.8.3 diff --git a/doc/release/v1.35.0.md b/doc/release/v1.35.0.md new file mode 100644 index 0000000000..4534bca4f7 --- /dev/null +++ b/doc/release/v1.35.0.md @@ -0,0 +1,20 @@ +## Fixes + +- Avoid doing truncate table in the mysql migration script. +- Fixed supertrend strategy. +- Fixed rma with adjust setting. + +## Features + +- Added heikinashi kline support +- Added DMI indicator +- Added marketcap strategy + +[Full Changelog](https://github.com/c9s/bbgo/compare/v1.34.0...main) + + - [#721](https://github.com/c9s/bbgo/pull/721): feature: add heikinashi support + - [#720](https://github.com/c9s/bbgo/pull/720): fix: fix strategy supertrend + - [#728](https://github.com/c9s/bbgo/pull/728): feature: add dmi indicator + - [#727](https://github.com/c9s/bbgo/pull/727): strategy: add marketcap strategy + - [#730](https://github.com/c9s/bbgo/pull/730): refactor: clean up unused max v2 api + - [#729](https://github.com/c9s/bbgo/pull/729): refactor: re-arrange maxapi files diff --git a/doc/strategy/grid.md b/doc/strategy/grid.md new file mode 100644 index 0000000000..3563db6212 --- /dev/null +++ b/doc/strategy/grid.md @@ -0,0 +1,32 @@ +### Grid Strategy + +This strategy places buy and sell orders within the specified price range. The gap between orders are equal, thus they +form `grids`. The price gap is calculated from price range and the number of grids. + + +#### Parameters + +- `symbol` + - The trading pair symbol, e.g., `BTCUSDT`, `ETHUSDT` +- `quantity` + - Quantity of asset per order. + You can also instead specify an amount of fiat per order with the `amount` parameter. +- `gridNumber` + - Number of grids, which is the maximum numbers of orders minus one. +- `profitSpread` + - The arbitrage profit amount of a set of buy and sell orders. In other words, the profit you want to add to your + sell order when your buy order is executed. 
+- `upperPrice` + - The upper bond price +- `lowerPrice` + - The lower bond price +- `long` + - If true, the sell order is submitted in the same order amount as the filled corresponding buy order, rather than + the same quantity, which means the arbitrage profit is accumulated in the base asset rather than the quote asset. +- `catchUp` + - If true, BBGO will try to submit orders for missing grids. + + +#### Examples + +See [grid.yaml](../../config/grid.yaml) diff --git a/doc/strategy/interaction.md b/doc/strategy/interaction.md new file mode 100644 index 0000000000..7f5935e99d --- /dev/null +++ b/doc/strategy/interaction.md @@ -0,0 +1,32 @@ +# Interaction + +In your strategy, you can register your messenger interaction by commands. + + +``` +package mymaker + +import ( + "github.com/c9s/bbgo/pkg/bbgo" +) + +func init() { + bbgo.RegisterInteraction(&MyInteraction{}) +} + +type MyInteraction struct {} + +func (m *MyInteraction) Commands(interact bbgo.Interact) { + interact.Command("closePosition", func(w bbgo.InteractWriter, symbol string, percentage float64) { + + }) +} + + +type Strategy struct {} +``` + + +The interaction engine parses the command from the messenger software programs like Telegram or Slack. +And then pass the arguments to the command handler defined in the strategy. + diff --git a/doc/strategy/pricealert.md b/doc/strategy/pricealert.md new file mode 100644 index 0000000000..ec40dc8a08 --- /dev/null +++ b/doc/strategy/pricealert.md @@ -0,0 +1,25 @@ +### Price Alert Strategy + +This strategy will send notifications to specified channels when the price change of the specified trading pairs is +larger than the threshold. + + +### Prerequisite +Setup Telegram/Slack notification before using Price Alert Strategy. See [Setting up Telegram Bot Notification +](../configuration/telegram.md) and [Setting up Slack Notification](../configuration/slack.md). + + +#### Parameters + +- `symbol` + - The trading pair symbol, e.g., `BTCUSDT`, `ETHUSDT` +- `interval` + - The K-line interval, e.g., `5m`, `1h` +- `minChange` + - Alert threshold, e.g., `100`, `500`. This is a fixed value of price change. Any price change in a single K-line + larger than this value will trigger the alert. + + +#### Examples + +See [pricealert.yaml](../../config/pricealert.yaml) and [pricealert-tg.yaml](../../config/pricealert-tg.yaml) \ No newline at end of file diff --git a/doc/strategy/supertrend.md b/doc/strategy/supertrend.md new file mode 100644 index 0000000000..01421c94f1 --- /dev/null +++ b/doc/strategy/supertrend.md @@ -0,0 +1,36 @@ +### Supertrend Strategy + +This strategy uses Supertrend indicator as trend, and DEMA indicator as noise filter. +Supertrend strategy needs margin enabled in order to submit short orders, and you can use `leverage` parameter to limit your risk. +**Please note, using leverage higher than 1 is highly risky.** + + +#### Parameters + +- `symbol` + - The trading pair symbol, e.g., `BTCUSDT`, `ETHUSDT` +- `interval` + - The K-line interval, e.g., `5m`, `1h` +- `leverage` + - The leverage of the orders. +- `fastDEMAWindow` + - The MA window of the fast DEMA. +- `slowDEMAWindow` + - The MA window of the slow DEMA. +- `superTrend` + - Supertrend indicator for deciding current trend. + - `averageTrueRangeWindow` + - The MA window of the ATR indicator used by Supertrend. + - `averageTrueRangeMultiplier` + - Multiplier for calculating upper and lower bond prices, the higher, the stronger the trends are, but also makes it less sensitive. 
+- `takeProfitMultiplier` + - TP according to ATR multiple, 0 to disable this. +- `stopLossByTriggeringK` + - Set SL price to the low of the triggering Kline. +- `tpslBySignal` + - TP/SL by reversed signals. + + +#### Examples + +See [supertrend.yaml](../../config/supertrend.yaml) \ No newline at end of file diff --git a/doc/strategy/support.md b/doc/strategy/support.md new file mode 100644 index 0000000000..0b899d4fc8 --- /dev/null +++ b/doc/strategy/support.md @@ -0,0 +1,50 @@ +### Support Strategy + +This strategy uses K-lines with high volume as support and buys the target asset, then takes profit at specified price. + + +#### Parameters + +- `symbol` + - The trading pair symbol, e.g., `BTCUSDT`, `ETHUSDT` +- `quantity` + - Quantity per order +- `interval` + - The K-line interval, e.g., `5m`, `1h` +- `minVolume` + - The threshold, e.g., `1000000`, `5000000`. A K-line with volume larger than this is seen as a support, and + triggers a market buy order. +- `triggerMovingAverage` + - The MA window in the current K-line interval to filter out noises. The closed price must be below this MA to + trigger the buy order. + - `interval` + - The K-line interval, e.g., `5m`, `1h` + - `window` + - The MA window in the specified K-line interval to filter out noises. +- `longTermMovingAverage` + - The MA window in a longer K-line interval. The closed price must be above this MA to trigger the buy order. + - `interval` + - The K-line interval, e.g., `5m`, `1h` + - `window` + - The MA window in the specified K-line interval to filter out noises. +- `maxBaseAssetBalance` + - Maximum quantity of the target asset. Orders will not be submitted if the current balance reaches this threshold. +- `minQuoteAssetBalance` + - Minimum quantity of the quote asset. Orders will not be submitted if the current balance reaches this threshold. +- `targets` + - `profitPercentage` + - Take profit ratio, e.g., 0.01 means taking profit when the price rises 1%. + - `quantityPercentage` + - The position ratio to take profit, e.g., 0.5 means selling 50% of the original buy order position when takes + profit. +- `trailingStopTarget` + - Use trailing stop to take profit + - `callbackRatio` + - Callback ratio of the trailing stop + - `minimumProfitPercentage` + - The minimum profit ratio of the trailing stop. The trailing stop is triggered when the profit is higher than the minimum. + + +#### Examples + +See [support.yaml](../../config/support.yaml) \ No newline at end of file diff --git a/doc/topics/back-testing.md b/doc/topics/back-testing.md new file mode 100644 index 0000000000..2e4db351bd --- /dev/null +++ b/doc/topics/back-testing.md @@ -0,0 +1,69 @@ +## Back-testing + +*Before you start back-testing, you need to setup [MySQL](../../README.md#configure-mysql-database) or [SQLite3 +](../../README.md#configure-sqlite3-database). Using MySQL is highly recommended.* + +First, you need to add the back-testing config to your `bbgo.yaml`: + +```yaml +backtest: + # your back-test will start at the 2021-01-10, be sure to sync the data before 2021-01-10 + # because some indicator like EMA needs more data to calculate the current EMA value. 
+ startTime: "2021-01-10" + + # your back-test will end at the 2021-01-10 + endTime: "2021-01-21" + + # the symbol data that you want to sync and back-test + symbols: + - BTCUSDT + + sessions: + - binance + + accounts: + # the initial account balance you want to start with + binance: # exchange name + balances: + BTC: 0.0 + USDT: 10000.0 +``` + +Note on date formats, the following date formats are supported: +* RFC3339, which looks like `2006-01-02T15:04:05Z07:00` +* RFC822, which looks like `02 Jan 06 15:04 MST` +* You can also use `2021-11-26T15:04:56` + +And then, you can sync remote exchange k-lines (candle bars) data for back-testing: + +```sh +bbgo backtest -v --sync --sync-only --sync-from 2020-11-01 --config config/grid.yaml +``` + +Note that, you should sync from an earlier date before your startTime because some indicator like EMA needs more data to calculate the current EMA value. +Here we sync one month before `2021-01-10`. + +- `--sync` - sync the data to the latest data point before we start the back-test. +- `--sync-only` - only the back-test data syncing will be executed. do not run back-test. +- `--sync-from` - sync the data from a specific endpoint. note that, once you've start the sync, you can not simply add more data before the initial date. +- `-v` - verbose message output +- `--config config/grid.yaml` - use a specific config file instead of the default config file `./bbgo.yaml` + +Run back-test: + +```sh +bbgo backtest --base-asset-baseline --config config/grid.yaml +``` + +If you're developing a strategy, you might want to start with a command like this: + +```shell +godotenv -f .env.local -- go run ./cmd/bbgo backtest --config config/grid.yaml --base-asset-baseline +``` + +## See Also + +If you want to test the max draw down (MDD) you can adjust the start date to somewhere near 2020-03-12 + +See + diff --git a/doc/topics/developing-strategy.md b/doc/topics/developing-strategy.md new file mode 100644 index 0000000000..c28f590a17 --- /dev/null +++ b/doc/topics/developing-strategy.md @@ -0,0 +1,82 @@ +# Developing Strategy + +There are two types of strategies in BBGO: + +1. built-in strategy: like grid, bollmaker, pricealert strategies, which are included in the pre-compiled binary. +2. external strategy: custom or private strategies that you don't want to expose to public. + +For built-in strategies, they are placed in `pkg/strategy` of the BBGO source repository. + +For external strategies, you can create a private repository as an isolated go package and place your strategy inside +it. + +In general, strategies are Go struct, placed in Go package. + +## The Strategy Struct + +BBGO loads the YAML config file and re-unmarshal the settings into your struct as JSON string, so you can define the +json tag to get the settings from the YAML config. 
+ +For example, if you're writing a strategy in a package called `short`, to load the following config: + +```yaml +externalStrategies: +- on: binance + short: + symbol: BTCUSDT +``` + +You can write the following struct to load the symbol setting: + +``` +package short + +type Strategy struct { + Symbol string `json:"symbol"` +} + +``` + +To use the Symbol setting, you can get the value from the Run method of the strategy: + +``` +func (s *Strategy) Run(ctx context.Context, session *bbgo.ExchangeSession) error { + // you need to import the "log" package + log.Println("%s", s.Symbol) + return nil +} +``` + +Now you have the Go struct and the Go package, but BBGO does not know your strategy, +so you need to register your strategy. + +Define an ID const in your package: + +``` +const ID = "short" +``` + +Then call bbgo.RegisterStrategy with the ID you just defined and a struct reference: + +``` +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} +``` + +Note that you don't need to fill the fields in the struct, BBGO just need to know the type of struct. + +(BBGO use reflect to parse the fields from the given struct and allocate a new struct object from the given struct type internally) + + +## Built-in Strategy + + + + + + + + + + diff --git a/doc/topics/dnum-binary.md b/doc/topics/dnum-binary.md new file mode 100644 index 0000000000..c6454e7945 --- /dev/null +++ b/doc/topics/dnum-binary.md @@ -0,0 +1,26 @@ +## Dnum: High Precision Numeric Implementation +---------------------------------------------- +The `dnum` version of `fixedpoint` supports up to 16 digits of decimal precision. It's two times slower than the legacy version, which only supports up to 8 digits of decimal precision. We recommend that strategy developers do algorithmic calculations in `float64`, then convert them back to `fixedpoint` to interact with exchanges to keep the balance between speed and the accuracy of accounting result. + +To Install dnum version of bbgo, we've create several scripts for quick setup: + +```sh +# grid trading strategy for binance exchange +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/setup-grid-dnum.sh) binance + +# grid trading strategy for max exchange +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/setup-grid-dnum.sh) max + +# bollinger grid trading strategy for binance exchange +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/setup-bollgrid-dnum.sh) binance + +# bollinger grid trading strategy for max exchange +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/setup-bollgrid-dnum.sh) max +``` + +If you already have the configuration somewhere, you may want to use the download-only script: +```sh +bash <(curl -s https://raw.githubusercontent.com/c9s/bbgo/main/scripts/download-dnum.sh) +``` + +The precompiled dnum binaries are also available in the [Release Page](https://github.com/c9s/bbgo/releases). 
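+As a quick illustration of the float64 round-trip recommended above, here is a minimal sketch; it assumes `fixedpoint.NewFromFloat` is the float64 constructor of the `fixedpoint` package, and the 1.5% markup is just an arbitrary example value:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/c9s/bbgo/pkg/fixedpoint"
+)
+
+func main() {
+	// prices usually arrive from the exchange as fixedpoint.Value
+	price := fixedpoint.NewFromFloat(27350.5)
+
+	// do the algorithmic math in float64 ...
+	target := price.Float64() * 1.015
+
+	// ... then convert back to fixedpoint before interacting with the exchange
+	submitPrice := fixedpoint.NewFromFloat(target)
+	fmt.Printf("submit price: %f\n", submitPrice.Float64())
+}
+```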
diff --git a/doc/topics/grpc.md b/doc/topics/grpc.md new file mode 100644 index 0000000000..aee2d15126 --- /dev/null +++ b/doc/topics/grpc.md @@ -0,0 +1,54 @@ +# GRPC server + +## Integrating GRPC services + +### Install Evans + +```shell +brew tap ktr0731/evans +brew install evans +``` + +Start your bbgo with gRPC server option: + +```shell +go run ./cmd/bbgo run --config grid_kucoin.yaml --debug --enable-grpc +``` + +The gRPC server port is located at 50051 (default port), you can use evans to connect to the gRPC server: + +```shell +evans --host localhost --port 50051 -r repl +``` + +```shell +bbgo@localhost:6688> package bbgo +bbgo@localhost:6688> show service +bbgo@localhost:6688> show message +``` + +You can use evans to get the description of a message: + +```shell +bbgo@localhost:6688> desc QueryKLinesRequest ++-----------+-------------+----------+ +| FIELD | TYPE | REPEATED | ++-----------+-------------+----------+ +| exchange | TYPE_STRING | false | +| interval | TYPE_STRING | false | +| limit | TYPE_INT64 | false | +| symbol | TYPE_STRING | false | +| timestamp | TYPE_INT64 | false | ++-----------+-------------+----------+ +``` + + +You can send the request via evans: + +```shell +evans -r cli call --file evans/userDataService/subscribe.json bbgo.UserDataService.Subscribe +evans -r cli call --file evans/marketDataService/subscribe_kline.json bbgo.MarketDataService.Subscribe +``` + + + diff --git a/doc/topics/twap.md b/doc/topics/twap.md new file mode 100644 index 0000000000..44232068e1 --- /dev/null +++ b/doc/topics/twap.md @@ -0,0 +1,39 @@ +## TWAP Order Execution + +Yes, bbgo supports TWAP order execution. If you have a large quantity order want to execute, +you can use this feature to update your order price according to the first bid/ask price in the order book. + + +### Usage + +``` +bbgo execute-order --session binance --symbol=BTCUSDT \ + --side=sell \ + --target-quantity=100.0 \ + --slice-quantity=0.01 \ + --stop-price=58000 +``` + +The above command will sell 100 BTC in total and for each slice it places a limit sell order with 0.01 BTC, and only place the sell order when price is above 58000. + +``` +bbgo execute-order --session max --symbol=USDTTWD --side=sell --target-quantity=1000.0 --slice-quantity=100.0 --stop-price=28.90 +``` + +`--symbol=SYMBOL` is the symbol of the market, the symbol should be in upper-case, for example, `USDTTWD` or `BTCUSDT` + +`--side=SIDE` is the side of your order. can be `buy` or `sell`. + +`--target-quantity=TARGET_QUANTITY` is the final quantity you want to buy/sell. + +`--slice-quantity=SLICE_QUANTITY` is the slice quantity per order. for example, if you have targetQuantity=100.0 and sliceQuantity=10.0, then the order will be split into 10 orders. + +`--stop-price` the highest/lowest price of your order. for example, the current best bid/ask price is 28.65 and 28.70, +if your stop price for BUY is `28.60`, your BUY order will stay at price `28.6` and won't be updated. +if your stop price for SELL is `28.75`, your SELL order will stay at price `28.75` and won't be updated. + +`--price-ticks` the incremental tick spread of the price. for example, the current best bid/ask price is 28.00 and 28.10, +the single tick of the USDT/TWD symbol is 0.01, +if you set `--price-ticks=2`, then the order executor will use 28.00 + 0.01 * 2 for your BUY order, and use 28.10 - 0.01 * 2 for your SELL order. 
+ +`--deadline` the deadline duration of your order execution, if time exceeded the deadline time, then the rest quantity will be sent as a market order. diff --git a/evans/marketDataService/subscribe_book_kucoin.json b/evans/marketDataService/subscribe_book_kucoin.json new file mode 100644 index 0000000000..6e9a833e7f --- /dev/null +++ b/evans/marketDataService/subscribe_book_kucoin.json @@ -0,0 +1,10 @@ +{ + "subscriptions": [ + { + "exchange": "kucoin", + "symbol": "BTCUSDT", + "channel": "BOOK", + "depth": "full" + } + ] +} diff --git a/evans/marketDataService/subscribe_kline_kucoin.json b/evans/marketDataService/subscribe_kline_kucoin.json new file mode 100644 index 0000000000..2b7f167453 --- /dev/null +++ b/evans/marketDataService/subscribe_kline_kucoin.json @@ -0,0 +1,10 @@ +{ + "subscriptions": [ + { + "exchange": "kucoin", + "symbol": "BTCUSDT", + "channel": "KLINE", + "interval": "1m" + } + ] +} diff --git a/evans/tradingService/submit_order_max.json b/evans/tradingService/submit_order_max.json new file mode 100644 index 0000000000..29ac2010cc --- /dev/null +++ b/evans/tradingService/submit_order_max.json @@ -0,0 +1,13 @@ +{ + "session": "max", + "submit_orders": [ + { + "exchange": "max", + "symbol": "BTCTWD", + "side": "BUY", + "price": "20000", + "quantity": "0.02", + "order_type": "LIMIT" + } + ] +} diff --git a/evans/userDataService/subscribe_binance.json b/evans/userDataService/subscribe_binance.json new file mode 100644 index 0000000000..2e95a3bcdd --- /dev/null +++ b/evans/userDataService/subscribe_binance.json @@ -0,0 +1,3 @@ +{ + "session": "binance" +} diff --git a/evans/userDataService/subscribe_kucoin.json b/evans/userDataService/subscribe_kucoin.json new file mode 100644 index 0000000000..832b4b44c2 --- /dev/null +++ b/evans/userDataService/subscribe_kucoin.json @@ -0,0 +1,3 @@ +{ + "session": "kucoin" +} diff --git a/examples/binance-book/main.go b/examples/binance-book/main.go index 9823bc454a..42d669a071 100644 --- a/examples/binance-book/main.go +++ b/examples/binance-book/main.go @@ -48,37 +48,41 @@ var rootCmd = &cobra.Command{ stream.SetPublicOnly() stream.Subscribe(types.BookChannel, symbol, types.SubscribeOptions{}) - stream.OnBookSnapshot(func(book types.OrderBook) { - // log.Infof("book snapshot: %+v", book) - }) - - stream.OnBookUpdate(func(book types.OrderBook) { - // log.Infof("book update: %+v", book) - }) - - streambook := types.NewStreamBook(symbol) - streambook.BindStream(stream) - streambook.OnUpdate(func(book *types.OrderBook) { - bestBid, hasBid := book.BestBid() - bestAsk, hasAsk := book.BestAsk() - - if !book.IsValid() { - log.Warnf("order book is invalid") - return - } + streamBook := types.NewStreamBook(symbol) + streamBook.BindStream(stream) + + go func() { + for { + select { + + case <-ctx.Done(): + return + + case <-streamBook.C: + book := streamBook.Copy() + + if valid, err := book.IsValid(); !valid { + log.Errorf("order book is invalid, error: %v", err) + continue + } + + bestBid, hasBid := book.BestBid() + bestAsk, hasAsk := book.BestAsk() + if hasBid && hasAsk { + log.Infof("================================") + log.Infof("best ask %f % -12f", + bestAsk.Price.Float64(), + bestAsk.Volume.Float64(), + ) + log.Infof("best bid %f % -12f", + bestBid.Price.Float64(), + bestBid.Volume.Float64(), + ) + } + } - if hasBid && hasAsk { - log.Infof("================================") - log.Infof("best ask %f % -12f", - bestAsk.Price.Float64(), - bestAsk.Volume.Float64(), - ) - log.Infof("best bid %f % -12f", - bestBid.Price.Float64(), - 
bestBid.Volume.Float64(), - ) } - }) + }() log.Info("connecting websocket...") if err := stream.Connect(ctx); err != nil { diff --git a/examples/create-self-trade/main.go b/examples/create-self-trade/main.go new file mode 100644 index 0000000000..d8533b463e --- /dev/null +++ b/examples/create-self-trade/main.go @@ -0,0 +1,141 @@ +package main + +import ( + "context" + "fmt" + "math" + "strings" + "syscall" + "time" + + "github.com/joho/godotenv" + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/spf13/viper" + + "github.com/c9s/bbgo/pkg/cmd/cmdutil" + exchange2 "github.com/c9s/bbgo/pkg/exchange" + "github.com/c9s/bbgo/pkg/types" +) + +func init() { + rootCmd.PersistentFlags().String("exchange", "binance", "exchange name") + rootCmd.PersistentFlags().String("symbol", "SANDUSDT", "symbol") +} + +var rootCmd = &cobra.Command{ + Use: "create-self-trade", + Short: "this program creates the self trade by getting the market ticker", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + if err := godotenv.Load(".env.local"); err != nil { + log.Fatal(err) + } + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return err + } + + exchangeNameStr, err := cmd.Flags().GetString("exchange") + if err != nil { + return err + } + + exchangeName, err := types.ValidExchangeName(exchangeNameStr) + if err != nil { + return err + } + + exchange, err := exchange2.New(exchangeName) + if err != nil { + return err + } + + markets, err := exchange.QueryMarkets(ctx) + if err != nil { + return err + } + + market, ok := markets[symbol] + if !ok { + return fmt.Errorf("market %s is not defined", symbol) + } + + stream := exchange.NewStream() + stream.OnTradeUpdate(func(trade types.Trade) { + log.Infof("trade: %+v", trade) + }) + + log.Info("connecting websocket...") + if err := stream.Connect(ctx); err != nil { + log.Fatal(err) + } + + time.Sleep(time.Second) + + ticker, err := exchange.QueryTicker(ctx, symbol) + if err != nil { + log.Fatal(err) + } + + price := ticker.Buy + market.TickSize + + if int64(ticker.Sell*1e8) == int64(price*1e8) { + log.Fatal("zero spread, can not continue") + } + + quantity := math.Max(market.MinNotional/price, market.MinQuantity) * 1.1 + + log.Infof("submiting order using quantity %f at price %f", quantity, price) + + createdOrders, err := exchange.SubmitOrders(ctx, []types.SubmitOrder{ + { + Symbol: symbol, + Market: market, + Side: types.SideTypeBuy, + Type: types.OrderTypeLimit, + Price: price, + Quantity: quantity, + TimeInForce: "GTC", + }, + { + Symbol: symbol, + Market: market, + Side: types.SideTypeSell, + Type: types.OrderTypeLimit, + Price: price, + Quantity: quantity, + TimeInForce: "GTC", + }, + }...) 
+ + if err != nil { + return err + } + + log.Info(createdOrders) + + cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) + return nil + }, +} + +func main() { + viper.AutomaticEnv() + viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) + + if err := viper.BindPFlags(rootCmd.PersistentFlags()); err != nil { + log.WithError(err).Error("bind pflags error") + } + + if err := rootCmd.ExecuteContext(context.Background()); err != nil { + log.WithError(err).Error("cmd error") + } +} diff --git a/examples/interact/main.go b/examples/interact/main.go new file mode 100644 index 0000000000..650f3299a2 --- /dev/null +++ b/examples/interact/main.go @@ -0,0 +1,150 @@ +package main + +import ( + "context" + "fmt" + "os" + "strconv" + "strings" + "syscall" + "time" + + log "github.com/sirupsen/logrus" + prefixed "github.com/x-cray/logrus-prefixed-formatter" + tb "gopkg.in/tucnak/telebot.v2" + + "github.com/c9s/bbgo/pkg/cmd/cmdutil" + "github.com/c9s/bbgo/pkg/interact" +) + +func parseFloatPercent(s string, bitSize int) (f float64, err error) { + i := strings.Index(s, "%") + if i < 0 { + return strconv.ParseFloat(s, bitSize) + } + + f, err = strconv.ParseFloat(s[:i], bitSize) + if err != nil { + return 0, err + } + return f / 100.0, nil +} + +type closePositionTask struct { + symbol string + percentage float64 + confirmed bool +} + +type PositionInteraction struct { + closePositionTask closePositionTask +} + +func (m *PositionInteraction) Commands(i *interact.Interact) { + i.Command("/closePosition", "", func(reply interact.Reply) error { + // send symbol options + reply.Message("Choose your position") + for _, symbol := range []string{"BTCUSDT", "ETHUSDT"} { + reply.AddButton(symbol, symbol, symbol) + } + + return nil + }).Next(func(reply interact.Reply, symbol string) error { + // get symbol from user + if len(symbol) == 0 { + reply.Message("Please enter a symbol") + return fmt.Errorf("empty symbol") + } + switch symbol { + case "BTCUSDT", "ETHUSDT": + + default: + reply.Message("Invalid symbol") + return fmt.Errorf("invalid symbol") + + } + + m.closePositionTask.symbol = symbol + + reply.Message("Choose or enter the percentage to close") + for _, symbol := range []string{"25%", "50%", "100%"} { + reply.AddButton(symbol, symbol, symbol) + } + + // send percentage options + return nil + }).Next(func(reply interact.Reply, percentageStr string) error { + p, err := parseFloatPercent(percentageStr, 64) + if err != nil { + reply.Message("Not a valid percentage string") + return err + } + + // get percentage from user + m.closePositionTask.percentage = p + + // send confirmation + reply.Message("Are you sure to close the position?") + reply.AddButton("Yes", "confirm", "yes") + return nil + }).Next(func(reply interact.Reply, confirm string) error { + switch strings.ToLower(confirm) { + case "yes": + m.closePositionTask.confirmed = true + reply.Message(fmt.Sprintf("Your %s position is closed", m.closePositionTask.symbol)) + + default: + + } + + // call position close + if kc, ok := reply.(interact.KeyboardController); ok { + kc.RemoveKeyboard() + } + + // reply result + return nil + }) +} + +func main() { + log.SetFormatter(&prefixed.TextFormatter{}) + + b, err := tb.NewBot(tb.Settings{ + // You can also set custom API URL. + // If field is empty it equals to "https://api.telegram.org". 
+ // URL: "http://195.129.111.17:8012", + + Token: os.Getenv("TELEGRAM_BOT_TOKEN"), + Poller: &tb.LongPoller{Timeout: 10 * time.Second}, + // Synchronous: false, + // Verbose: true, + // ParseMode: "", + // Reporter: nil, + // Client: nil, + // Offline: false, + }) + + if err != nil { + log.Fatal(err) + return + } + + ctx := context.Background() + interact.AddMessenger(&interact.Telegram{ + Private: true, + Bot: b, + }) + + interact.AddCustomInteraction(&interact.AuthInteract{ + Strict: true, + Mode: interact.AuthModeToken, + Token: "123", + }) + + interact.AddCustomInteraction(&PositionInteraction{}) + if err := interact.Start(ctx); err != nil { + log.Fatal(err) + } + cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) +} diff --git a/examples/kucoin-accounts/main.go b/examples/kucoin-accounts/main.go new file mode 100644 index 0000000000..669658ca2c --- /dev/null +++ b/examples/kucoin-accounts/main.go @@ -0,0 +1,72 @@ +package main + +import ( + "context" + "os" + "strings" + + "github.com/joho/godotenv" + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/spf13/viper" + + "github.com/c9s/bbgo/pkg/exchange/kucoin/kucoinapi" +) + +func init() { + rootCmd.PersistentFlags().String("kucoin-api-key", "", "okex api key") + rootCmd.PersistentFlags().String("kucoin-api-secret", "", "okex api secret") + rootCmd.PersistentFlags().String("kucoin-api-passphrase", "", "okex api secret") +} + +var rootCmd = &cobra.Command{ + Use: "kucoin-accounts", + Short: "kucoin accounts", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + req := client.AccountService.NewListAccountsRequest() + accounts, err := req.Do(context.Background()) + if err != nil { + return err + } + + log.Infof("accounts: %+v", accounts) + return nil + }, +} + +var client *kucoinapi.RestClient = nil + +func main() { + if _, err := os.Stat(".env.local"); err == nil { + if err := godotenv.Load(".env.local"); err != nil { + log.Fatal(err) + } + } + + viper.AutomaticEnv() + viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) + + if err := viper.BindPFlags(rootCmd.PersistentFlags()); err != nil { + log.WithError(err).Error("bind pflags error") + } + + client = kucoinapi.NewClient() + + key, secret, passphrase := viper.GetString("kucoin-api-key"), + viper.GetString("kucoin-api-secret"), + viper.GetString("kucoin-api-passphrase") + + if len(key) == 0 || len(secret) == 0 || len(passphrase) == 0 { + log.Fatal("empty key, secret or passphrase") + } + + client.Auth(key, secret, passphrase) + + if err := rootCmd.ExecuteContext(context.Background()); err != nil { + log.WithError(err).Error("cmd error") + } +} diff --git a/examples/kucoin/accounts.go b/examples/kucoin/accounts.go new file mode 100644 index 0000000000..7e09e13cc6 --- /dev/null +++ b/examples/kucoin/accounts.go @@ -0,0 +1,41 @@ +package main + +import ( + "context" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(accountsCmd) +} + +var accountsCmd = &cobra.Command{ + Use: "accounts", + + // SilenceUsage is an option to silence usage when an error occurs. 
+ SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) > 0 { + req := client.AccountService.NewGetAccountRequest(args[0]) + account, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("account: %+v", account) + return nil + } + + req := client.AccountService.NewListAccountsRequest() + accounts, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("accounts: %+v", accounts) + return nil + }, +} diff --git a/examples/kucoin/bullet.go b/examples/kucoin/bullet.go new file mode 100644 index 0000000000..cd09e01af1 --- /dev/null +++ b/examples/kucoin/bullet.go @@ -0,0 +1,55 @@ +package main + +import ( + "context" + "errors" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(bulletCmd) +} + +var bulletCmd = &cobra.Command{ + Use: "bullet", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + Args: cobra.ExactArgs(1), + + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) == 0 { + return nil + } + + ctx := context.Background() + t := args[0] + + switch t { + case "public": + bullet, err := client.BulletService.NewGetPublicBulletRequest().Do(ctx) + if err != nil { + return err + } + + logrus.Infof("public bullet: %+v", bullet) + + case "private": + bullet, err := client.BulletService.NewGetPrivateBulletRequest().Do(ctx) + if err != nil { + return err + } + + logrus.Infof("private bullet: %+v", bullet) + + default: + return errors.New("valid bullet type: public, private") + + } + + return nil + }, +} diff --git a/examples/kucoin/fills.go b/examples/kucoin/fills.go new file mode 100644 index 0000000000..7241be9da1 --- /dev/null +++ b/examples/kucoin/fills.go @@ -0,0 +1,45 @@ +package main + +import ( + "context" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +func init() { + fillsCmd.Flags().String("symbol", "", "symbol, BTC-USDT, LTC-USDT...etc") + rootCmd.AddCommand(fillsCmd) +} + +// go run ./examples/kucoin fills +var fillsCmd = &cobra.Command{ + Use: "fills", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return err + } + + if len(symbol) == 0 { + return errors.New("--symbol option is required") + } + + req := client.TradeService.NewGetFillsRequest() + req.Symbol(symbol) + + page, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("page: %+v", page) + return nil + }, +} diff --git a/examples/kucoin/main.go b/examples/kucoin/main.go new file mode 100644 index 0000000000..812f90537d --- /dev/null +++ b/examples/kucoin/main.go @@ -0,0 +1,65 @@ +package main + +import ( + "context" + "os" + "strings" + + "github.com/c9s/bbgo/pkg/exchange/kucoin/kucoinapi" + "github.com/joho/godotenv" + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/spf13/viper" +) + +func init() { + rootCmd.PersistentFlags().String("kucoin-api-key", "", "okex api key") + rootCmd.PersistentFlags().String("kucoin-api-secret", "", "okex api secret") + rootCmd.PersistentFlags().String("kucoin-api-passphrase", "", "okex api secret") + +} + +var rootCmd = &cobra.Command{ + Use: "kucoin", + Short: "kucoin", + + // SilenceUsage is an option to silence usage when an error occurs. 
+ SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + return nil + }, +} + +var client *kucoinapi.RestClient = nil + +func main() { + if _, err := os.Stat(".env.local"); err == nil { + if err := godotenv.Load(".env.local"); err != nil { + log.Fatal(err) + } + } + + viper.AutomaticEnv() + viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) + + if err := viper.BindPFlags(rootCmd.PersistentFlags()); err != nil { + log.WithError(err).Error("bind pflags error") + } + + client = kucoinapi.NewClient() + + key, secret, passphrase := viper.GetString("kucoin-api-key"), + viper.GetString("kucoin-api-secret"), + viper.GetString("kucoin-api-passphrase") + + if len(key) == 0 || len(secret) == 0 || len(passphrase) == 0 { + log.Fatal("empty key, secret or passphrase") + } + + client.Auth(key, secret, passphrase) + + if err := rootCmd.ExecuteContext(context.Background()); err != nil { + log.WithError(err).Error("cmd error") + } +} diff --git a/examples/kucoin/orderbook.go b/examples/kucoin/orderbook.go new file mode 100644 index 0000000000..ba99e10773 --- /dev/null +++ b/examples/kucoin/orderbook.go @@ -0,0 +1,44 @@ +package main + +import ( + "strconv" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(orderbookCmd) +} + +var orderbookCmd = &cobra.Command{ + Use: "orderbook", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + Args: cobra.MinimumNArgs(1), + + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) == 0 { + return nil + } + + var depth = 0 + if len(args) > 1 { + v, err := strconv.Atoi(args[1]) + if err != nil { + return err + } + depth = v + } + + orderBook, err := client.MarketDataService.GetOrderBook(args[0], depth) + if err != nil { + return err + } + + logrus.Infof("orderBook: %+v", orderBook) + return nil + }, +} diff --git a/examples/kucoin/orders.go b/examples/kucoin/orders.go new file mode 100644 index 0000000000..4acb01cbee --- /dev/null +++ b/examples/kucoin/orders.go @@ -0,0 +1,221 @@ +package main + +import ( + "context" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/exchange/kucoin/kucoinapi" +) + +func init() { + ordersCmd.Flags().String("symbol", "", "symbol, BTC-USDT, LTC-USDT...etc") + ordersCmd.Flags().String("status", "", "status, active or done") + rootCmd.AddCommand(ordersCmd) + + cancelOrderCmd.Flags().String("client-order-id", "", "client order id") + cancelOrderCmd.Flags().String("order-id", "", "order id") + cancelOrderCmd.Flags().Bool("all", false, "cancel all") + ordersCmd.AddCommand(cancelOrderCmd) + + placeOrderCmd.Flags().String("symbol", "", "symbol") + placeOrderCmd.Flags().String("price", "", "price") + placeOrderCmd.Flags().String("size", "", "size") + placeOrderCmd.Flags().String("order-type", string(kucoinapi.OrderTypeLimit), "order type") + placeOrderCmd.Flags().String("side", "", "buy or sell") + ordersCmd.AddCommand(placeOrderCmd) + + historyOrdersCmd.Flags().String("symbol", "", "symbol, BTC-USDT, LTC-USDT...etc") + ordersCmd.AddCommand(historyOrdersCmd) +} + +// go run ./examples/kucoin orders +var ordersCmd = &cobra.Command{ + Use: "orders", + + // SilenceUsage is an option to silence usage when an error occurs. 
+ SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return err + } + + if len(symbol) == 0 { + return errors.New("--symbol option is required") + } + + status, err := cmd.Flags().GetString("status") + if err != nil { + return err + } + + req := client.TradeService.NewListOrdersRequest() + req.Symbol(symbol) + + if len(status) > 0 { + req.Status(status) + } + + page, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("page: %+v", page) + return nil + }, +} + +// go run ./examples/kucoin orders history +var historyOrdersCmd = &cobra.Command{ + Use: "history [--symbol SYMBOL]", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return err + } + + if len(symbol) == 0 { + return errors.New("--symbol option is required") + } + + req := client.TradeService.NewListHistoryOrdersRequest() + req.Symbol(symbol) + req.StartAt(time.Now().AddDate(0, -2, 0)) + + page, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("page: %+v", page) + return nil + }, +} + +// usage: +// go run ./examples/kucoin orders place --symbol LTC-USDT --price 50 --size 1 --order-type limit --side buy +var placeOrderCmd = &cobra.Command{ + Use: "place", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + req := client.TradeService.NewPlaceOrderRequest() + + orderType, err := cmd.Flags().GetString("order-type") + if err != nil { + return err + } + + req.OrderType(kucoinapi.OrderType(orderType)) + + side, err := cmd.Flags().GetString("side") + if err != nil { + return err + } + req.Side(kucoinapi.SideType(side)) + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return err + } + + if len(symbol) == 0 { + return errors.New("--symbol is required") + } + + req.Symbol(symbol) + + switch kucoinapi.OrderType(orderType) { + case kucoinapi.OrderTypeLimit: + price, err := cmd.Flags().GetString("price") + if err != nil { + return err + } + req.Price(price) + + case kucoinapi.OrderTypeMarket: + + } + + size, err := cmd.Flags().GetString("size") + if err != nil { + return err + } + req.Size(size) + + response, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("place order response: %+v", response) + return nil + }, +} + +// usage: +var cancelOrderCmd = &cobra.Command{ + Use: "cancel", + + // SilenceUsage is an option to silence usage when an error occurs. 
+ SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + + cancelAll, err := cmd.Flags().GetBool("all") + if err != nil { + return err + } + + if cancelAll { + req := client.TradeService.NewCancelAllOrderRequest() + req.TradeType("TRADE") + response, err := req.Do(context.Background()) + if err != nil { + return err + } + logrus.Infof("cancel all order response: %+v", response) + return nil + } + + req := client.TradeService.NewCancelOrderRequest() + orderID, err := cmd.Flags().GetString("order-id") + if err != nil { + return err + } + + clientOrderID, err := cmd.Flags().GetString("client-order-id") + if err != nil { + return err + } + + if len(orderID) > 0 { + req.OrderID(orderID) + } else if len(clientOrderID) > 0 { + req.ClientOrderID(clientOrderID) + } else { + return errors.New("either order id or client order id is required") + } + + response, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("cancel order response: %+v", response) + return nil + }, +} diff --git a/examples/kucoin/subaccounts.go b/examples/kucoin/subaccounts.go new file mode 100644 index 0000000000..c274930d7e --- /dev/null +++ b/examples/kucoin/subaccounts.go @@ -0,0 +1,31 @@ +package main + +import ( + "context" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(subAccountsCmd) +} + +var subAccountsCmd = &cobra.Command{ + Use: "subaccounts", + Short: "subaccounts", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + req := client.AccountService.NewListSubAccountsRequest() + subAccounts, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("subAccounts: %+v", subAccounts) + return nil + }, +} diff --git a/examples/kucoin/symbols.go b/examples/kucoin/symbols.go new file mode 100644 index 0000000000..dd2420ce22 --- /dev/null +++ b/examples/kucoin/symbols.go @@ -0,0 +1,27 @@ +package main + +import ( + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(symbolsCmd) +} + +var symbolsCmd = &cobra.Command{ + Use: "symbols", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + symbols, err := client.MarketDataService.ListSymbols(args...) + if err != nil { + return err + } + + logrus.Infof("symbols: %+v", symbols) + return nil + }, +} diff --git a/examples/kucoin/tickers.go b/examples/kucoin/tickers.go new file mode 100644 index 0000000000..0da748783e --- /dev/null +++ b/examples/kucoin/tickers.go @@ -0,0 +1,47 @@ +package main + +import ( + "context" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(tickersCmd) +} + +var tickersCmd = &cobra.Command{ + Use: "tickers", + + // SilenceUsage is an option to silence usage when an error occurs. 
+ SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) == 0 { + allTickers, err := client.MarketDataService.ListTickers() + if err != nil { + return err + } + + logrus.Infof("allTickers: %+v", allTickers) + return nil + } + + req := client.MarketDataService.NewGetTickerRequest(args[0]) + ticker, err := req.Do(context.Background()) + if err != nil { + return err + } + + logrus.Infof("ticker: %+v", ticker) + + tickerStats, err := client.MarketDataService.GetTicker24HStat(args[0]) + if err != nil { + return err + } + + logrus.Infof("ticker 24h stats: %+v", tickerStats) + return nil + }, +} diff --git a/examples/kucoin/websocket.go b/examples/kucoin/websocket.go new file mode 100644 index 0000000000..d522637d7c --- /dev/null +++ b/examples/kucoin/websocket.go @@ -0,0 +1,155 @@ +package main + +import ( + "context" + "errors" + "os" + "os/signal" + "time" + + "github.com/c9s/bbgo/pkg/exchange/kucoin" + "github.com/c9s/bbgo/pkg/exchange/kucoin/kucoinapi" + "github.com/gorilla/websocket" + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(websocketCmd) +} + +var websocketCmd = &cobra.Command{ + Use: "websocket", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + Args: cobra.ExactArgs(1), + + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) == 0 { + return nil + } + + var ctx = context.Background() + var t = args[0] + var err error + var bullet *kucoinapi.Bullet + + switch t { + case "public": + bullet, err = client.BulletService.NewGetPublicBulletRequest().Do(ctx) + if err != nil { + return err + } + + logrus.Infof("public bullet: %+v", bullet) + + case "private": + bullet, err = client.BulletService.NewGetPrivateBulletRequest().Do(ctx) + if err != nil { + return err + } + + logrus.Infof("private bullet: %+v", bullet) + + default: + return errors.New("valid bullet type: public, private") + + } + + u, err := bullet.URL() + if err != nil { + return err + } + + interrupt := make(chan os.Signal, 1) + signal.Notify(interrupt, os.Interrupt) + + logrus.Infof("connecting %s", u.String()) + c, _, err := websocket.DefaultDialer.Dial(u.String(), nil) + if err != nil { + return err + } + + defer c.Close() + + id := time.Now().UnixNano() / int64(time.Millisecond) + wsCmds := []kucoin.WebSocketCommand{ + /* + { + Id: id+1, + Type: "subscribe", + Topic: "/market/ticker:ETH-USDT", + PrivateChannel: false, + Response: true, + }, + */ + { + Id: id + 2, + Type: "subscribe", + Topic: "/market/candles:ETH-USDT_1min", + PrivateChannel: false, + Response: true, + }, + } + + for _, wsCmd := range wsCmds { + err = c.WriteJSON(wsCmd) + if err != nil { + return err + } + } + + done := make(chan struct{}) + go func() { + defer close(done) + for { + _, message, err := c.ReadMessage() + if err != nil { + logrus.Infoln("read:", err) + return + } + + logrus.Infof("recv: %s", message) + } + }() + + pingTicker := time.NewTicker(bullet.PingInterval()) + defer pingTicker.Stop() + + for { + select { + case <-done: + return nil + + case <-pingTicker.C: + if err := c.WriteJSON(kucoin.WebSocketCommand{ + Id: time.Now().UnixNano() / int64(time.Millisecond), + Type: "ping", + }); err != nil { + logrus.WithError(err).Error("websocket ping error", err) + } + + case <-interrupt: + logrus.Infof("interrupt") + + // Cleanly close the connection by sending a close message and then + // waiting (with timeout) for the server to close the connection. 
+ err := c.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) + if err != nil { + logrus.Error("write close:", err) + return nil + } + + select { + case <-done: + case <-time.After(time.Second): + } + return nil + } + } + + return nil + }, +} diff --git a/examples/max-eqmaker/main.go b/examples/max-eqmaker/main.go deleted file mode 100644 index bf5d0f135d..0000000000 --- a/examples/max-eqmaker/main.go +++ /dev/null @@ -1,260 +0,0 @@ -package main - -import ( - "context" - "math" - "strings" - "syscall" - "time" - - "github.com/pkg/errors" - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "github.com/spf13/viper" - - "github.com/c9s/bbgo/pkg/cmd/cmdutil" - "github.com/c9s/bbgo/pkg/exchange/max" - maxapi "github.com/c9s/bbgo/pkg/exchange/max/maxapi" - "github.com/c9s/bbgo/pkg/fixedpoint" - "github.com/c9s/bbgo/pkg/types" - "github.com/c9s/bbgo/pkg/util" -) - -func init() { - rootCmd.PersistentFlags().String("max-api-key", "", "max api key") - rootCmd.PersistentFlags().String("max-api-secret", "", "max api secret") - rootCmd.PersistentFlags().String("symbol", "maxusdt", "symbol") - - rootCmd.Flags().String("side", "buy", "side") - rootCmd.Flags().Int("num-orders", 5, "number of orders for one side") - rootCmd.Flags().Float64("behind-volume", 1000.0, "behind volume depth") - rootCmd.Flags().Float64("base-quantity", 100.0, "base quantity") - rootCmd.Flags().Float64("price-tick", 0.02, "price tick") - rootCmd.Flags().Float64("buy-sell-ratio", 1.0, "price tick") -} - -var rootCmd = &cobra.Command{ - Use: "trade", - Short: "start trader", - - // SilenceUsage is an option to silence usage when an error occurs. - SilenceUsage: true, - - RunE: func(cmd *cobra.Command, args []string) error { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - symbol := viper.GetString("symbol") - if len(symbol) == 0 { - return errors.New("empty symbol") - } - - key, secret := viper.GetString("max-api-key"), viper.GetString("max-api-secret") - if len(key) == 0 || len(secret) == 0 { - return errors.New("empty key or secret") - } - - side, err := cmd.Flags().GetString("side") - if err != nil { - return err - } - - iv, err := cmd.Flags().GetInt("num-orders") - if err != nil { - return err - } - var numOrders = iv - - fv, err := cmd.Flags().GetFloat64("base-quantity") - if err != nil { - return err - } - var baseQuantity = fixedpoint.NewFromFloat(fv) - - fv, err = cmd.Flags().GetFloat64("price-tick") - if err != nil { - return err - } - var priceTick = fixedpoint.NewFromFloat(fv) - - fv, err = cmd.Flags().GetFloat64("behind-volume") - if err != nil { - return err - } - - var behindVolume = fixedpoint.NewFromFloat(fv) - - buySellRatio, err := cmd.Flags().GetFloat64("buy-sell-ratio") - if err != nil { - return err - } - - maxRest := maxapi.NewRestClient(maxapi.ProductionAPIURL) - maxRest.Auth(key, secret) - - stream := max.NewStream(key, secret) - stream.Subscribe(types.BookChannel, symbol, types.SubscribeOptions{}) - - stream.OnOrderUpdate(func(order types.Order) { - log.Infof("order: %+v", order) - }) - - stream.OnBalanceSnapshot(func(balances types.BalanceMap) { - log.Infof("balances: %+v",balances) - }) - - streambook := types.NewStreamBook(symbol) - streambook.BindStream(stream) - - cancelSideOrders := func(symbol string, side string) { - if err := maxRest.OrderService.CancelAll(side, symbol); err != nil { - log.WithError(err).Error("cancel all error") - } - - streambook.C.Drain(2*time.Second, 5*time.Second) - } - - 
updateSideOrders := func(symbol string, side string, baseQuantity fixedpoint.Value) { - book := streambook.Copy() - - var pvs types.PriceVolumeSlice - - switch side { - case "buy": - pvs = book.Bids - case "sell": - pvs = book.Asks - } - - if pvs == nil || len(pvs) == 0 { - log.Warn("empty bids or asks") - return - } - - index := pvs.IndexByVolumeDepth(behindVolume) - if index == -1 { - // do not place orders - log.Warn("depth is not enough") - return - } - - var price = pvs[index].Price - var orders = generateOrders(symbol, side, price, priceTick, baseQuantity, numOrders) - if len(orders) == 0 { - log.Warn("empty orders") - return - } - log.Infof("submitting %d orders", len(orders)) - - retOrders, err := maxRest.OrderService.CreateMulti(symbol, orders) - if err != nil { - log.WithError(err).Error("create multi error") - } - _ = retOrders - - streambook.C.Drain(2*time.Second, 5*time.Second) - } - - update := func() { - switch side { - case "both": - cancelSideOrders(symbol, "buy") - updateSideOrders(symbol, "buy", baseQuantity.MulFloat64(buySellRatio)) - - cancelSideOrders(symbol, "sell") - updateSideOrders(symbol, "sell", baseQuantity.MulFloat64(1.0/buySellRatio)) - - default: - cancelSideOrders(symbol, side) - updateSideOrders(symbol, side, baseQuantity) - } - } - - go func() { - ticker := time.NewTicker(1 * time.Minute) - defer ticker.Stop() - - for { - select { - case <-ctx.Done(): - return - - case <-streambook.C: - streambook.C.Drain(2*time.Second, 5*time.Second) - update() - - case <-ticker.C: - update() - } - } - }() - - log.Info("connecting websocket...") - if err := stream.Connect(ctx); err != nil { - log.Fatal(err) - } - - cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) - return nil - }, -} - -func generateOrders(symbol, side string, price, priceTick, baseVolume fixedpoint.Value, numOrders int) (orders []maxapi.Order) { - var expBase = fixedpoint.NewFromFloat(0.0) - - switch side { - case "buy": - if priceTick > 0 { - priceTick = -priceTick - } - case "sell": - if priceTick < 0 { - priceTick = -priceTick - } - } - - for i := 0; i < numOrders; i++ { - volume := math.Exp(expBase.Float64()) * baseVolume.Float64() - - // skip order less than 10usd - if volume*price.Float64() < 10.0 { - log.Warnf("amount too small (< 10usd). 
price=%f volume=%f amount=%f", price.Float64(), volume, volume*price.Float64()) - continue - } - - orders = append(orders, maxapi.Order{ - Side: side, - OrderType: maxapi.OrderTypeLimit, - Market: symbol, - Price: util.FormatFloat(price.Float64(), 3), - Volume: util.FormatFloat(volume, 2), - // GroupID: 0, - }) - - log.Infof("%s order: %.2f @ %.3f", side, volume, price.Float64()) - - if len(orders) >= numOrders { - break - } - - price = price + priceTick - declog := math.Log10(math.Abs(priceTick.Float64())) - expBase += fixedpoint.NewFromFloat(math.Pow10(-int(declog)) * math.Abs(priceTick.Float64())) - log.Infof("expBase: %f", expBase.Float64()) - } - - return orders -} - -func main() { - viper.AutomaticEnv() - viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) - - if err := viper.BindPFlags(rootCmd.PersistentFlags()); err != nil { - log.WithError(err).Error("bind pflags error") - } - - if err := rootCmd.ExecuteContext(context.Background()); err != nil { - log.WithError(err).Error("cmd error") - } -} diff --git a/examples/max-orders/main.go b/examples/max-orders/main.go deleted file mode 100644 index 7b97ede8c2..0000000000 --- a/examples/max-orders/main.go +++ /dev/null @@ -1,25 +0,0 @@ -package main - -import ( - "log" - "os" - - maxapi "github.com/c9s/bbgo/pkg/exchange/max/maxapi" -) - -func main() { - key := os.Getenv("MAX_API_KEY") - secret := os.Getenv("MAX_API_SECRET") - - maxRest := maxapi.NewRestClient(maxapi.ProductionAPIURL) - maxRest.Auth(key, secret) - - orders, err := maxRest.OrderService.All("maxusdt", 100, 1, maxapi.OrderStateDone) - if err != nil { - log.Fatal(err) - } - - for _, order := range orders { - log.Printf("%+v", order) - } -} diff --git a/examples/max-rewards/main.go b/examples/max-rewards/main.go new file mode 100644 index 0000000000..59a4326700 --- /dev/null +++ b/examples/max-rewards/main.go @@ -0,0 +1,49 @@ +package main + +import ( + "context" + "log" + "os" + + maxapi "github.com/c9s/bbgo/pkg/exchange/max/maxapi" +) + +func main() { + + key := os.Getenv("MAX_API_KEY") + secret := os.Getenv("MAX_API_SECRET") + + api := maxapi.NewRestClient(maxapi.ProductionAPIURL) + api.Auth(key, secret) + + ctx := context.Background() + + var req *maxapi.GetRewardsRequest + + if len(os.Args) > 1 { + pathType := os.Args[1] + rewardType, err := maxapi.ParseRewardType(pathType) + if err != nil { + log.Fatal(err) + } + + req = api.RewardService.NewGetRewardsOfTypeRequest(rewardType) + } else { + req = api.RewardService.NewGetRewardsRequest() + } + + // req.From(1613931192) + // req.From(1613240048) + // req.From(maxapi.TimestampSince) + // req.To(maxapi.TimestampSince + 3600 * 24) + req.Limit(100) + + rewards, err := req.Do(ctx) + if err != nil { + log.Fatal(err) + } + + for _, reward := range rewards { + log.Printf("%+v\n", reward) + } +} diff --git a/examples/max-withdraw/main.go b/examples/max-withdraw/main.go new file mode 100644 index 0000000000..46449e81ae --- /dev/null +++ b/examples/max-withdraw/main.go @@ -0,0 +1,125 @@ +package main + +import ( + "context" + "log" + "os" + "time" + + maxapi "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + flag "github.com/spf13/pflag" +) + +func waitWithdrawalsComplete(ctx context.Context, client *maxapi.RestClient, currency string, limit int) error { + var lastState string + for { + withdrawals, err := client.WithdrawalService.NewGetWithdrawalHistoryRequest(). + Currency(currency). + Limit(limit). 
+ Do(ctx) + if err != nil { + return err + } + + pending := false + for _, withdrawal := range withdrawals { + if lastState == "" { + log.Printf("-> %s", withdrawal.State) + } else if withdrawal.State != lastState { + log.Printf("%s -> %s", lastState, withdrawal.State) + log.Printf("withdrawal %s %s: %s", withdrawal.Amount, withdrawal.Currency, withdrawal.State) + log.Printf("\t%+v", withdrawal) + } + lastState = withdrawal.State + + switch withdrawal.State { + case "submitting", "submitted", "pending", "processing", "approved": + pending = true + + log.Printf("there is a pending withdrawal request, waiting...") + break + + case "sent", "confirmed": + continue + + case "rejected": + + } + + } + + if !pending { + break + } + + time.Sleep(10 * time.Second) + } + + return nil +} + +func main() { + var do bool + var currency string + var targetAddress string + var amount float64 + flag.StringVar(&currency, "currency", "", "currency") + flag.StringVar(&targetAddress, "targetAddress", "", "withdraw target address") + flag.Float64Var(&amount, "amount", 0, "transfer amount") + flag.BoolVar(&do, "do", false, "do") + flag.Parse() + + key := os.Getenv("MAX_API_KEY") + secret := os.Getenv("MAX_API_SECRET") + + if currency == "" { + log.Fatal("--currency is required") + } + + if targetAddress == "" { + log.Fatal("--targetAddress is required") + } + + if amount == 0.0 { + log.Fatal("--amount can not be zero") + } + + ctx := context.Background() + + maxRest := maxapi.NewRestClient(maxapi.ProductionAPIURL) + maxRest.Auth(key, secret) + + if err := waitWithdrawalsComplete(ctx, maxRest, currency, 1); err != nil { + log.Fatal(err) + } + log.Printf("all withdrawals are sent, sending new withdrawal request...") + + addresses, err := maxRest.WithdrawalService.NewGetWithdrawalAddressesRequest(). + Currency(currency).Do(ctx) + if err != nil { + log.Fatal(err) + } + + for _, address := range addresses { + if address.Address == targetAddress { + log.Printf("found address: %+v", address) + if do { + response, err := maxRest.WithdrawalService.NewWithdrawalRequest(). + Currency(currency). + Amount(amount). + AddressUUID(address.UUID). + Do(ctx) + if err != nil { + log.Fatal(err) + } + log.Printf("withdrawal request response: %+v", response) + break + } + } + } + + if err := waitWithdrawalsComplete(ctx, maxRest, currency, 1); err != nil { + log.Fatal(err) + } + log.Printf("all withdrawals are sent") +} diff --git a/examples/okex-book/main.go b/examples/okex-book/main.go new file mode 100644 index 0000000000..77218d81c4 --- /dev/null +++ b/examples/okex-book/main.go @@ -0,0 +1,214 @@ +package main + +import ( + "context" + "os" + "strings" + "time" + + "github.com/c9s/bbgo/pkg/exchange/okex/okexapi" + "github.com/joho/godotenv" + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/spf13/viper" +) + +func init() { + rootCmd.PersistentFlags().String("okex-api-key", "", "okex api key") + rootCmd.PersistentFlags().String("okex-api-secret", "", "okex api secret") + rootCmd.PersistentFlags().String("okex-api-passphrase", "", "okex api passphrase") + rootCmd.PersistentFlags().String("symbol", "BNBUSDT", "symbol") +} + +var rootCmd = &cobra.Command{ + Use: "okex-book", + Short: "okex book", + + // SilenceUsage is an option to silence usage when an error occurs.
+ SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + symbol := viper.GetString("symbol") + if len(symbol) == 0 { + return errors.New("empty symbol") + } + + key, secret, passphrase := viper.GetString("okex-api-key"), + viper.GetString("okex-api-secret"), + viper.GetString("okex-api-passphrase") + if len(key) == 0 || len(secret) == 0 { + return errors.New("empty key, secret or passphrase") + } + + client := okexapi.NewClient() + client.Auth(key, secret, passphrase) + + instruments, err := client.PublicDataService.NewGetInstrumentsRequest(). + InstrumentType("SPOT").Do(ctx) + if err != nil { + return err + } + + log.Infof("instruments: %+v", instruments) + + fundingRate, err := client.PublicDataService.NewGetFundingRate().InstrumentID("BTC-USDT-SWAP").Do(ctx) + if err != nil { + return err + } + log.Infof("funding rate: %+v", fundingRate) + + log.Infof("ACCOUNT BALANCES:") + account, err := client.AccountBalances() + if err != nil { + return err + } + + log.Infof("%+v", account) + + log.Infof("ASSET BALANCES:") + assetBalances, err := client.AssetBalances() + if err != nil { + return err + } + + for _, balance := range assetBalances { + log.Infof("%T%+v", balance, balance) + } + + log.Infof("ASSET CURRENCIES:") + currencies, err := client.AssetCurrencies() + if err != nil { + return err + } + + for _, currency := range currencies { + log.Infof("%T%+v", currency, currency) + } + + log.Infof("MARKET TICKERS:") + tickers, err := client.MarketTickers(okexapi.InstrumentTypeSpot) + if err != nil { + return err + } + + for _, ticker := range tickers { + log.Infof("%T%+v", ticker, ticker) + } + + ticker, err := client.MarketTicker("ETH-USDT") + if err != nil { + return err + } + log.Infof("TICKER:") + log.Infof("%T%+v", ticker, ticker) + + log.Infof("PLACING ORDER:") + placeResponse, err := client.TradeService.NewPlaceOrderRequest(). + InstrumentID("LTC-USDT"). + OrderType(okexapi.OrderTypeLimit). + Side(okexapi.SideTypeBuy). + Price("50.0"). + Quantity("0.5"). + Do(ctx) + if err != nil { + return err + } + + log.Infof("place order response: %+v", placeResponse) + time.Sleep(time.Second) + + log.Infof("getting order detail...") + orderDetail, err := client.TradeService.NewGetOrderDetailsRequest(). + InstrumentID("LTC-USDT"). + OrderID(placeResponse.OrderID). + Do(ctx) + if err != nil { + return err + } + + log.Infof("order detail: %+v", orderDetail) + + cancelResponse, err := client.TradeService.NewCancelOrderRequest(). + InstrumentID("LTC-USDT"). + OrderID(placeResponse.OrderID). + Do(ctx) + if err != nil { + return err + } + log.Infof("cancel order response: %+v", cancelResponse) + + time.Sleep(time.Second) + + log.Infof("BATCH PLACE ORDER:") + batchPlaceReq := client.TradeService.NewBatchPlaceOrderRequest() + batchPlaceReq.Add(client.TradeService.NewPlaceOrderRequest(). + InstrumentID("LTC-USDT"). + OrderType(okexapi.OrderTypeLimit). + Side(okexapi.SideTypeBuy). + Price("50.0"). + Quantity("0.5")) + + batchPlaceReq.Add(client.TradeService.NewPlaceOrderRequest(). + InstrumentID("LTC-USDT"). + OrderType(okexapi.OrderTypeLimit). + Side(okexapi.SideTypeBuy). + Price("30.0"). 
+ Quantity("0.5")) + + batchPlaceResponse, err := batchPlaceReq.Do(ctx) + if err != nil { + return err + } + + log.Infof("batch place order response: %+v", batchPlaceResponse) + time.Sleep(time.Second) + + log.Infof("getting pending orders...") + pendingOrders, err := client.TradeService.NewGetPendingOrderRequest().Do(ctx) + if err != nil { + return err + } + for _, pendingOrder := range pendingOrders { + log.Infof("pending order: %+v", pendingOrder) + } + + cancelReq := client.TradeService.NewBatchCancelOrderRequest() + for _, resp := range batchPlaceResponse { + cancelReq.Add(client.TradeService.NewCancelOrderRequest(). + InstrumentID("LTC-USDT"). + OrderID(resp.OrderID)) + } + + batchCancelResponse, err := cancelReq.Do(ctx) + if err != nil { + return err + } + log.Infof("batch cancel order response: %+v", batchCancelResponse) + + // cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) + return nil + }, +} + +func main() { + if _, err := os.Stat(".env.local"); err == nil { + if err := godotenv.Load(".env.local"); err != nil { + log.Fatal(err) + } + } + + viper.AutomaticEnv() + viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) + + if err := viper.BindPFlags(rootCmd.PersistentFlags()); err != nil { + log.WithError(err).Error("bind pflags error") + } + + if err := rootCmd.ExecuteContext(context.Background()); err != nil { + log.WithError(err).Error("cmd error") + } +} diff --git a/examples/telebot/main.go b/examples/telebot/main.go new file mode 100644 index 0000000000..64fa760ba1 --- /dev/null +++ b/examples/telebot/main.go @@ -0,0 +1,154 @@ +package main + +import ( + "os" + "time" + + "github.com/google/uuid" + log "github.com/sirupsen/logrus" + tb "gopkg.in/tucnak/telebot.v2" +) + +func main() { + var ( + // Universal markup builders. + menu = &tb.ReplyMarkup{ResizeReplyKeyboard: true} + + // Reply buttons. + btnHelp = menu.Text("ℹ Help") + btnSettings = menu.Text("⚙ Settings") + ) + + menu.Reply( + menu.Row(btnHelp), + menu.Row(btnSettings), + ) + + b, err := tb.NewBot(tb.Settings{ + // You can also set custom API URL. + // If field is empty it equals to "https://api.telegram.org". + // URL: "http://195.129.111.17:8012", + + Token: os.Getenv("TELEGRAM_BOT_TOKEN"), + Poller: &tb.LongPoller{Timeout: 10 * time.Second}, + // Synchronous: false, + Verbose: false, + // ParseMode: "", + // Reporter: nil, + // Client: nil, + // Offline: false, + }) + + if err != nil { + log.Fatal(err) + return + } + // Command: /start + b.Handle("/start", func(m *tb.Message) { + if !m.Private() { + return + } + + b.Send(m.Sender, "Hello!", menu) + }) + + // On reply button pressed (message) + b.Handle(&btnHelp, func(m *tb.Message) { + log.Infof("btnHelp: %#v", m) + + var ( + // Inline buttons. + // + // Pressing it will cause the client to + // send the bot a callback. + // + // Make sure Unique stays unique as per button kind, + // as it has to be for callback routing to work. + // + selector = &tb.ReplyMarkup{} + btnPrev = selector.Data("⬅", "prev", "data1", "data2") + btnNext = selector.Data("➡", "next", "data1", "data2") + ) + selector.Inline( + selector.Row(btnPrev, btnNext), + ) + + // On inline button pressed (callback) + b.Handle(&btnPrev, func(c *tb.Callback) { + // Always respond! 
+ b.Respond(c, &tb.CallbackResponse{ + Text: "callback response", + ShowAlert: false, + // URL: "", + }) + }) + + b.Send(m.Sender, "help button clicked", selector) + }) + + b.Handle("/hello", func(m *tb.Message) { + log.Infof("/hello %#v", m) + // b.Send(m.Sender, "Hello World!") + }) + + b.Handle(tb.OnText, func(m *tb.Message) { + log.Infof("[onText] %#v", m) + // all the text messages that weren't + // captured by existing handlers + }) + + b.Handle(tb.OnQuery, func(q *tb.Query) { + log.Infof("[onQuery] %#v", q) + + // r := &tb.ReplyMarkup{} + // r.URL("test", "https://media.tenor.com/images/f176705ae1bb3c457e19d8cd71718ac0/tenor.gif") + urls := []string{ + // "https://media.tenor.com/images/aae0cdf3c5a291cd7b96432180f6eee3/tenor.png", + // "https://media.tenor.com/images/905c1a9b1f56ae3c458b1ef58fd46357/tenor.png", + + "https://media.tenor.com/images/2e69768f9537957ed3015a80ebc3f0f1/tenor.gif", + "https://media.tenor.com/images/6fcd72b29127a55e5c35db86d06d665c/tenor.gif", + "https://media.tenor.com/images/05dbf5bf3a3b88275bb045691541dc53/tenor.gif", + "https://media.tenor.com/images/0e1a52cfe5616c1509090d6ec2312db0/tenor.gif", + "https://media.tenor.com/images/1ca04a449b26e1f7d45682a79d2c8697/tenor.gif", + "https://media.tenor.com/images/a2844b186fb71c376226b56c4ea7730a/tenor.gif", + "https://media.tenor.com/images/ec636a1ebce1a3fc1c795b851c125b31/tenor.gif", + "https://media.tenor.com/images/ae103819cb05a0cf7497900b77b87d80/tenor.gif", + } + + results := make(tb.Results, len(urls)) // []tb.Result + for i, url := range urls { + // result := &tb.PhotoResult{ + result := &tb.GifResult{ + ResultBase: tb.ResultBase{ + // Type: "photo", + // Content: nil, + // ReplyMarkup: nil, + ID: uuid.New().String(), + }, + + URL: url, + + // required for photos + ThumbURL: url, + } + + results[i] = result + // needed to set a unique string ID for each result + // results[i].SetResultID(strconv.Itoa(i)) + } + + err := b.Answer(q, &tb.QueryResponse{ + QueryID: q.ID, + Results: results, + // CacheTime: 60, // a minute + IsPersonal: true, + }) + + if err != nil { + log.Println(err) + } + }) + + b.Start() +} diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000000..1437c53f70 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,34 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env.local +.env.development.local +.env.test.local +.env.production.local + +# vercel +.vercel diff --git a/frontend/.prettierignore b/frontend/.prettierignore new file mode 100644 index 0000000000..9b87eae9e3 --- /dev/null +++ b/frontend/.prettierignore @@ -0,0 +1,3 @@ +.next/ +out/ +node_modules/ \ No newline at end of file diff --git a/frontend/.prettierrc.json b/frontend/.prettierrc.json new file mode 100644 index 0000000000..544138be45 --- /dev/null +++ b/frontend/.prettierrc.json @@ -0,0 +1,3 @@ +{ + "singleQuote": true +} diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000000..4b412a3cfa --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,34 @@ +This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). 
+ +## Getting Started + +First, run the development server: + +```bash +npm run dev +# or +yarn dev +``` + +Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. + +You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file. + +[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`. + +The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages. + +## Learn More + +To learn more about Next.js, take a look at the following resources: + +- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. +- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. + +You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! + +## Deploy on Vercel + +The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/import?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. + +Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. diff --git a/frontend/api/bbgo.ts b/frontend/api/bbgo.ts new file mode 100644 index 0000000000..fe2948999b --- /dev/null +++ b/frontend/api/bbgo.ts @@ -0,0 +1,190 @@ +import axios from 'axios'; + +const baseURL = + process.env.NODE_ENV === 'development' ? 'http://localhost:8080' : ''; + +export function ping(cb) { + return axios.get(baseURL + '/api/ping').then((response) => { + cb(response.data); + }); +} + +export function queryOutboundIP(cb) { + return axios.get(baseURL + '/api/outbound-ip').then((response) => { + cb(response.data.outboundIP); + }); +} + +export async function triggerSync() { + return axios.post(baseURL + '/api/environment/sync'); +} + +export enum SyncStatus { + SyncNotStarted = 0, + Syncing = 1, + SyncDone = 2, +} + +export async function querySyncStatus(): Promise { + const resp = await axios.get(baseURL + '/api/environment/syncing'); + return resp.data.syncing; +} + +export function testDatabaseConnection(params, cb) { + return axios.post(baseURL + '/api/setup/test-db', params).then((response) => { + cb(response.data); + }); +} + +export function configureDatabase(params, cb) { + return axios + .post(baseURL + '/api/setup/configure-db', params) + .then((response) => { + cb(response.data); + }); +} + +export function saveConfig(cb) { + return axios.post(baseURL + '/api/setup/save').then((response) => { + cb(response.data); + }); +} + +export function setupRestart(cb) { + return axios.post(baseURL + '/api/setup/restart').then((response) => { + cb(response.data); + }); +} + +export function addSession(session, cb) { + return axios.post(baseURL + '/api/sessions', session).then((response) => { + cb(response.data || []); + }); +} + +export function attachStrategyOn(session, strategyID, strategy, cb) { + return axios + .post( + baseURL + `/api/setup/strategy/single/${strategyID}/session/${session}`, + strategy + ) + .then((response) => { + cb(response.data); + }); +} + +export function testSessionConnection(session, cb) { + return axios + .post(baseURL + '/api/sessions/test', session) + .then((response) => { + cb(response.data); + }); +} + 
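+// queryStrategies fetches the list of configured single strategies from the bbgo API server and passes it (an empty array if none) to the callback.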
+export function queryStrategies(cb) { + return axios.get(baseURL + '/api/strategies/single').then((response) => { + cb(response.data.strategies || []); + }); +} + +export function querySessions(cb) { + return axios.get(baseURL + '/api/sessions', {}).then((response) => { + cb(response.data.sessions || []); + }); +} + +export function querySessionSymbols(sessionName, cb) { + return axios + .get(baseURL + `/api/sessions/${sessionName}/symbols`, {}) + .then((response) => { + cb(response.data?.symbols || []); + }); +} + +export function queryTrades(params, cb) { + axios + .get(baseURL + '/api/trades', { params: params }) + .then((response) => { + cb(response.data.trades || []); + }); +} + +export function queryClosedOrders(params, cb) { + axios + .get(baseURL + '/api/orders/closed', { params: params }) + .then((response) => { + cb(response.data.orders || []); + }); +} + +export function queryAssets(cb) { + axios.get(baseURL + '/api/assets', {}).then((response) => { + cb(response.data.assets || []); + }); +} + +export function queryTradingVolume(params, cb) { + axios + .get(baseURL + '/api/trading-volume', { params: params }) + .then((response) => { + cb(response.data.tradingVolumes || []); + }); +} + +export interface GridStrategy { + id: string; + instanceID: string; + strategy: string; + grid: { + symbol: string; + }; + stats: GridStats; + status: string; + startTime: number; +} + +export interface GridStats { + oneDayArbs: number; + totalArbs: number; + investment: number; + totalProfits: number; + gridProfits: number; + floatingPNL: number; + currentPrice: number; + lowestPrice: number; + highestPrice: number; +} + +export async function queryStrategiesMetrics(): Promise { + const temp = { + id: 'uuid', + instanceID: 'testInstanceID', + strategy: 'grid', + grid: { + symbol: 'BTCUSDT', + }, + stats: { + oneDayArbs: 0, + totalArbs: 3, + investment: 100, + totalProfits: 5.6, + gridProfits: 2.5, + floatingPNL: 3.1, + currentPrice: 29000, + lowestPrice: 25000, + highestPrice: 35000, + }, + status: 'RUNNING', + startTime: 1654938187102, + }; + + const testArr = []; + + for (let i = 0; i < 11; i++) { + const cloned = { ...temp }; + cloned.id = 'uuid' + i; + testArr.push(cloned); + } + + return testArr; +} diff --git a/frontend/components/AddExchangeSessionForm.js b/frontend/components/AddExchangeSessionForm.js new file mode 100644 index 0000000000..d8fc566cec --- /dev/null +++ b/frontend/components/AddExchangeSessionForm.js @@ -0,0 +1,342 @@ +import React from 'react'; +import Grid from '@mui/material/Grid'; +import Box from '@mui/material/Box'; +import Button from '@mui/material/Button'; +import Typography from '@mui/material/Typography'; +import TextField from '@mui/material/TextField'; +import FormControlLabel from '@mui/material/FormControlLabel'; +import FormHelperText from '@mui/material/FormHelperText'; +import InputLabel from '@mui/material/InputLabel'; +import FormControl from '@mui/material/FormControl'; +import InputAdornment from '@mui/material/InputAdornment'; +import IconButton from '@mui/material/IconButton'; + +import Checkbox from '@mui/material/Checkbox'; +import Select from '@mui/material/Select'; +import MenuItem from '@mui/material/MenuItem'; +import FilledInput from '@mui/material/FilledInput'; + +import Alert from '@mui/lab/Alert'; +import VisibilityOff from '@mui/icons-material/VisibilityOff'; +import Visibility from '@mui/icons-material/Visibility'; + +import { addSession, testSessionConnection } from '../api/bbgo'; + +import { makeStyles } from '@mui/styles'; + +const 
useStyles = makeStyles((theme) => ({ + formControl: { + marginTop: theme.spacing(1), + marginBottom: theme.spacing(1), + minWidth: 120, + }, + buttons: { + display: 'flex', + justifyContent: 'flex-end', + marginTop: theme.spacing(2), + paddingTop: theme.spacing(2), + paddingBottom: theme.spacing(2), + '& > *': { + marginLeft: theme.spacing(1), + }, + }, +})); + +export default function AddExchangeSessionForm({ onBack, onAdded }) { + const classes = useStyles(); + const [exchangeType, setExchangeType] = React.useState('max'); + const [customSessionName, setCustomSessionName] = React.useState(false); + const [sessionName, setSessionName] = React.useState(exchangeType); + + const [testing, setTesting] = React.useState(false); + const [testResponse, setTestResponse] = React.useState(null); + const [response, setResponse] = React.useState(null); + + const [apiKey, setApiKey] = React.useState(''); + const [apiSecret, setApiSecret] = React.useState(''); + + const [showApiKey, setShowApiKey] = React.useState(false); + const [showApiSecret, setShowApiSecret] = React.useState(false); + + const [isMargin, setIsMargin] = React.useState(false); + const [isIsolatedMargin, setIsIsolatedMargin] = React.useState(false); + const [isolatedMarginSymbol, setIsolatedMarginSymbol] = React.useState(''); + + const resetTestResponse = () => { + setTestResponse(null); + }; + + const handleExchangeTypeChange = (event) => { + setExchangeType(event.target.value); + setSessionName(event.target.value); + resetTestResponse(); + }; + + const createSessionConfig = () => { + return { + name: sessionName, + exchange: exchangeType, + key: apiKey, + secret: apiSecret, + margin: isMargin, + envVarPrefix: exchangeType.toUpperCase(), + isolatedMargin: isIsolatedMargin, + isolatedMarginSymbol: isolatedMarginSymbol, + }; + }; + + const handleAdd = (event) => { + const payload = createSessionConfig(); + addSession(payload, (response) => { + setResponse(response); + if (onAdded) { + setTimeout(onAdded, 3000); + } + }).catch((error) => { + console.error(error); + setResponse(error.response); + }); + }; + + const handleTestConnection = (event) => { + const payload = createSessionConfig(); + setTesting(true); + testSessionConnection(payload, (response) => { + console.log(response); + setTesting(false); + setTestResponse(response); + }).catch((error) => { + console.error(error); + setTesting(false); + setTestResponse(error.response); + }); + }; + + return ( + + + Add Exchange Session + + + + + Exchange + + + + + + { + setSessionName(event.target.value); + }} + value={sessionName} + /> + + + + { + setCustomSessionName(event.target.checked); + }} + value="1" + /> + } + label="Custom exchange session name" + /> + + By default, the session name will be the exchange type name, e.g.{' '} + binance or max.
+ If you're using multiple exchange sessions, you might need to customize + the session name.
+ This is for advanced users. +
+
+ + + + API Key + + { + setShowApiKey(!showApiKey); + }} + onMouseDown={(event) => { + event.preventDefault(); + }} + edge="end" + > + {showApiKey ? : } + + + } + onChange={(event) => { + setApiKey(event.target.value); + resetTestResponse(); + }} + /> + + + + + + API Secret + + { + setShowApiSecret(!showApiSecret); + }} + onMouseDown={(event) => { + event.preventDefault(); + }} + edge="end" + > + {showApiSecret ? : } + + + } + onChange={(event) => { + setApiSecret(event.target.value); + resetTestResponse(); + }} + /> + + + + {exchangeType === 'binance' ? ( + + { + setIsMargin(event.target.checked); + resetTestResponse(); + }} + value="1" + /> + } + label="Use margin trading." + /> + + This is only available for Binance. Please use the leverage at + your own risk. + + + { + setIsIsolatedMargin(event.target.checked); + resetTestResponse(); + }} + value="1" + /> + } + label="Use isolated margin trading." + /> + + This is only available for Binance. If this is set, you can only + trade one symbol with one session. + + + {isIsolatedMargin ? ( + { + setIsolatedMarginSymbol(event.target.value); + resetTestResponse(); + }} + fullWidth + required + /> + ) : null} + + ) : null} +
+ +
+ + + + + +
+ + {testResponse ? ( + testResponse.error ? ( + + {testResponse.error} + + ) : testResponse.success ? ( + + Connection Test Succeeded + + ) : null + ) : null} + + {response ? ( + response.error ? ( + + {response.error} + + ) : response.success ? ( + + Exchange Session Added + + ) : null + ) : null} +
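For reference, here is a minimal sketch of the session object that createSessionConfig() above assembles before handing it to testSessionConnection and addSession. The field names mirror the form code; the values are placeholders, not working credentials.

// Illustrative only: field names mirror createSessionConfig(); values are placeholders.
const sessionPayload = {
  name: 'binance',            // defaults to the exchange type unless customized above
  exchange: 'binance',
  key: 'YOUR_API_KEY',        // placeholder
  secret: 'YOUR_API_SECRET',  // placeholder
  margin: false,
  envVarPrefix: 'BINANCE',    // exchangeType.toUpperCase()
  isolatedMargin: false,
  isolatedMarginSymbol: '',
};

// The form then calls the helpers from ../api/bbgo with this payload:
// testSessionConnection(sessionPayload, (resp) => { /* show test result */ });
// addSession(sessionPayload, (resp) => { /* session added */ });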
+ ); +} diff --git a/frontend/components/ConfigureDatabaseForm.js b/frontend/components/ConfigureDatabaseForm.js new file mode 100644 index 0000000000..1b3d7ce35c --- /dev/null +++ b/frontend/components/ConfigureDatabaseForm.js @@ -0,0 +1,209 @@ +import React from 'react'; +import Grid from '@mui/material/Grid'; +import Box from '@mui/material/Box'; +import Button from '@mui/material/Button'; +import Typography from '@mui/material/Typography'; +import TextField from '@mui/material/TextField'; +import FormHelperText from '@mui/material/FormHelperText'; +import Radio from '@mui/material/Radio'; +import RadioGroup from '@mui/material/RadioGroup'; +import FormControlLabel from '@mui/material/FormControlLabel'; +import FormControl from '@mui/material/FormControl'; +import FormLabel from '@mui/material/FormLabel'; + +import Alert from '@mui/lab/Alert'; + +import { configureDatabase, testDatabaseConnection } from '../api/bbgo'; + +import { makeStyles } from '@mui/styles'; + +const useStyles = makeStyles((theme) => ({ + formControl: { + marginTop: theme.spacing(1), + marginBottom: theme.spacing(1), + minWidth: 120, + }, + buttons: { + display: 'flex', + justifyContent: 'flex-end', + marginTop: theme.spacing(2), + paddingTop: theme.spacing(2), + paddingBottom: theme.spacing(2), + '& > *': { + marginLeft: theme.spacing(1), + }, + }, +})); + +export default function ConfigureDatabaseForm({ onConfigured }) { + const classes = useStyles(); + + const [mysqlURL, setMysqlURL] = React.useState( + 'root@tcp(127.0.0.1:3306)/bbgo' + ); + + const [driver, setDriver] = React.useState('sqlite3'); + const [testing, setTesting] = React.useState(false); + const [testResponse, setTestResponse] = React.useState(null); + const [configured, setConfigured] = React.useState(false); + + const getDSN = () => (driver === 'sqlite3' ? 'file:bbgo.sqlite3' : mysqlURL); + + const resetTestResponse = () => { + setTestResponse(null); + }; + + const handleConfigureDatabase = (event) => { + const dsn = getDSN(); + + configureDatabase({ driver, dsn }, (response) => { + console.log(response); + setTesting(false); + setTestResponse(response); + if (onConfigured) { + setConfigured(true); + setTimeout(onConfigured, 3000); + } + }).catch((err) => { + console.error(err); + setTesting(false); + setTestResponse(err.response.data); + }); + }; + + const handleTestConnection = (event) => { + const dsn = getDSN(); + + setTesting(true); + testDatabaseConnection({ driver, dsn }, (response) => { + console.log(response); + setTesting(false); + setTestResponse(response); + }).catch((err) => { + console.error(err); + setTesting(false); + setTestResponse(err.response.data); + }); + }; + + return ( + + + Configure Database + + + + If you have database installed on your machine, you can enter the DSN + string in the following field. Please note this is optional, you CAN + SKIP this step. + + + + + + + Database Driver + { + setDriver(event.target.value); + }} + > + } + label="Standard (Default)" + /> + } + label="MySQL" + /> + + + + + + + {driver === 'mysql' ? ( + + { + setMysqlURL(event.target.value); + resetTestResponse(); + }} + /> + MySQL DSN + + + If you have database installed on your machine, you can enter the + DSN string like the following format: +
+
+                root:password@tcp(127.0.0.1:3306)/bbgo
+              
+
+ Be sure to create your database before using it. You need to + execute the following statement to create a database: +
+
+                CREATE DATABASE bbgo CHARSET utf8;
+              
+
+
+ ) : ( + + + + If you don't know what to choose, just pick the standard driver + (sqlite3). +
+ For professionals, you can pick the MySQL driver; BBGO works best + with MySQL, especially at larger data scales. +
+
+
+ )} +
+ +
+ + + +
+ + {testResponse ? ( + testResponse.error ? ( + + {testResponse.error} + + ) : testResponse.success ? ( + + Connection Test Succeeded + + ) : null + ) : null} +
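As a quick recap of the submission shape: getDSN() above picks the DSN by driver, and both helpers receive the same { driver, dsn } object. The values below are simply the form defaults, not recommended production settings.

// Sketch of the two configurations this form can submit (values are the form defaults).
const sqliteConfig = { driver: 'sqlite3', dsn: 'file:bbgo.sqlite3' };
const mysqlConfig = { driver: 'mysql', dsn: 'root@tcp(127.0.0.1:3306)/bbgo' };

// Both helpers take the same shape plus a callback:
// testDatabaseConnection(mysqlConfig, (resp) => { /* show test result */ });
// configureDatabase(sqliteConfig, (resp) => { /* proceed to the next step */ });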
+ ); +} diff --git a/frontend/components/ConfigureGridStrategyForm.js b/frontend/components/ConfigureGridStrategyForm.js new file mode 100644 index 0000000000..2305671e52 --- /dev/null +++ b/frontend/components/ConfigureGridStrategyForm.js @@ -0,0 +1,446 @@ +import React from 'react'; +import PropTypes from 'prop-types'; + +import Grid from '@mui/material/Grid'; +import Button from '@mui/material/Button'; +import Typography from '@mui/material/Typography'; + +import { makeStyles } from '@mui/styles'; +import { + attachStrategyOn, + querySessions, + querySessionSymbols, +} from '../api/bbgo'; + +import TextField from '@mui/material/TextField'; +import FormControlLabel from '@mui/material/FormControlLabel'; +import FormHelperText from '@mui/material/FormHelperText'; +import InputLabel from '@mui/material/InputLabel'; +import FormControl from '@mui/material/FormControl'; +import Radio from '@mui/material/Radio'; +import RadioGroup from '@mui/material/RadioGroup'; +import FormLabel from '@mui/material/FormLabel'; +import Select from '@mui/material/Select'; +import MenuItem from '@mui/material/MenuItem'; + +import Alert from '@mui/lab/Alert'; +import Box from '@mui/material/Box'; + +import NumberFormat from 'react-number-format'; + +function parseFloatValid(s) { + if (s) { + const f = parseFloat(s); + if (!isNaN(f)) { + return f; + } + } + + return null; +} + +function parseFloatCall(s, cb) { + if (s) { + const f = parseFloat(s); + if (!isNaN(f)) { + cb(f); + } + } +} + +function StandardNumberFormat(props) { + const { inputRef, onChange, ...other } = props; + return ( + { + onChange({ + target: { + name: props.name, + value: values.value, + }, + }); + }} + thousandSeparator + isNumericString + /> + ); +} + +StandardNumberFormat.propTypes = { + inputRef: PropTypes.func.isRequired, + name: PropTypes.string.isRequired, + onChange: PropTypes.func.isRequired, +}; + +function PriceNumberFormat(props) { + const { inputRef, onChange, ...other } = props; + + return ( + { + onChange({ + target: { + name: props.name, + value: values.value, + }, + }); + }} + thousandSeparator + isNumericString + prefix="$" + /> + ); +} + +PriceNumberFormat.propTypes = { + inputRef: PropTypes.func.isRequired, + name: PropTypes.string.isRequired, + onChange: PropTypes.func.isRequired, +}; + +const useStyles = makeStyles((theme) => ({ + formControl: { + marginTop: theme.spacing(1), + marginBottom: theme.spacing(1), + minWidth: 120, + }, + buttons: { + display: 'flex', + justifyContent: 'flex-end', + marginTop: theme.spacing(2), + paddingTop: theme.spacing(2), + paddingBottom: theme.spacing(2), + '& > *': { + marginLeft: theme.spacing(1), + }, + }, +})); + +export default function ConfigureGridStrategyForm({ onBack, onAdded }) { + const classes = useStyles(); + + const [errors, setErrors] = React.useState({}); + + const [sessions, setSessions] = React.useState([]); + + const [activeSessionSymbols, setActiveSessionSymbols] = React.useState([]); + + const [selectedSessionName, setSelectedSessionName] = React.useState(null); + + const [selectedSymbol, setSelectedSymbol] = React.useState(''); + + const [quantityBy, setQuantityBy] = React.useState('fixedAmount'); + + const [upperPrice, setUpperPrice] = React.useState(30000.0); + const [lowerPrice, setLowerPrice] = React.useState(10000.0); + + const [fixedAmount, setFixedAmount] = React.useState(100.0); + const [fixedQuantity, setFixedQuantity] = React.useState(1.234); + const [gridNumber, setGridNumber] = React.useState(20); + const [profitSpread, setProfitSpread] = 
React.useState(100.0); + + const [response, setResponse] = React.useState({}); + + React.useEffect(() => { + querySessions((sessions) => { + setSessions(sessions); + }); + }, []); + + const handleAdd = (event) => { + const payload = { + symbol: selectedSymbol, + gridNumber: parseFloatValid(gridNumber), + profitSpread: parseFloatValid(profitSpread), + upperPrice: parseFloatValid(upperPrice), + lowerPrice: parseFloatValid(lowerPrice), + }; + switch (quantityBy) { + case 'fixedQuantity': + payload.quantity = parseFloatValid(fixedQuantity); + break; + + case 'fixedAmount': + payload.amount = parseFloatValid(fixedAmount); + break; + } + + if (!selectedSessionName) { + setErrors({ session: true }); + return; + } + + if (!selectedSymbol) { + setErrors({ symbol: true }); + return; + } + + console.log(payload); + attachStrategyOn(selectedSessionName, 'grid', payload, (response) => { + console.log(response); + setResponse(response); + if (onAdded) { + setTimeout(onAdded, 3000); + } + }) + .catch((err) => { + console.error(err); + setResponse(err.response.data); + }) + .finally(() => { + setErrors({}); + }); + }; + + const handleQuantityBy = (event) => { + setQuantityBy(event.target.value); + }; + + const handleSessionChange = (event) => { + const sessionName = event.target.value; + setSelectedSessionName(sessionName); + + querySessionSymbols(sessionName, (symbols) => { + setActiveSessionSymbols(symbols); + }).catch((err) => { + console.error(err); + setResponse(err.response.data); + }); + }; + + const sessionMenuItems = sessions.map((session, index) => { + return ( + + {session.name} + + ); + }); + + const symbolMenuItems = activeSessionSymbols.map((symbol, index) => { + return ( + + {symbol} + + ); + }); + + return ( + + + Add Grid Strategy + + + + Fixed price band grid strategy uses the fixed price band to place + buy/sell orders. This strategy places sell orders above the current + price, places buy orders below the current price. If any of the order is + executed, then it will automatically place a new profit order on the + reverse side. + + + + + + Session + + + + Select the exchange session you want to mount this strategy. + + + + + + Market + + + + Select the market you want to run this strategy + + + + + { + parseFloatCall(event.target.value, setUpperPrice); + }} + value={upperPrice} + InputProps={{ + inputComponent: PriceNumberFormat, + }} + /> + + + + { + parseFloatCall(event.target.value, setLowerPrice); + }} + value={lowerPrice} + InputProps={{ + inputComponent: PriceNumberFormat, + }} + /> + + + + { + parseFloatCall(event.target.value, setProfitSpread); + }} + value={profitSpread} + InputProps={{ + inputComponent: StandardNumberFormat, + }} + /> + + + + + Order Quantity By + + } + label="Fixed Amount" + /> + } + label="Fixed Quantity" + /> + + + + + + {quantityBy === 'fixedQuantity' ? ( + { + parseFloatCall(event.target.value, setFixedQuantity); + }} + value={fixedQuantity} + InputProps={{ + inputComponent: StandardNumberFormat, + }} + /> + ) : null} + + {quantityBy === 'fixedAmount' ? ( + { + parseFloatCall(event.target.value, setFixedAmount); + }} + value={fixedAmount} + InputProps={{ + inputComponent: PriceNumberFormat, + }} + /> + ) : null} + + + + { + parseFloatCall(event.target.value, setGridNumber); + }} + value={gridNumber} + InputProps={{ + inputComponent: StandardNumberFormat, + }} + /> + + + +
+ + + +
+ + {response ? ( + response.error ? ( + + {response.error} + + ) : response.success ? ( + + Strategy Added + + ) : null + ) : null} +
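To make the resulting request concrete, here is a sketch of the payload handleAdd() above passes to attachStrategyOn(selectedSessionName, 'grid', payload, cb). The field names come from the form, the numbers are just the form defaults, and the session name used in the call is hypothetical.

// Illustrative grid payload, mirroring handleAdd() with the form defaults.
const gridPayload = {
  symbol: 'BTCUSDT',      // hypothetical market selection
  gridNumber: 20,
  profitSpread: 100.0,
  upperPrice: 30000.0,
  lowerPrice: 10000.0,
  amount: 100.0,          // set when quantityBy === 'fixedAmount'
  // quantity: 1.234,     // set instead when quantityBy === 'fixedQuantity'
};

// attachStrategyOn('max', 'grid', gridPayload, (resp) => { /* strategy added */ });
// 'max' is a hypothetical session name picked from the Session dropdown.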
+ ); +} diff --git a/frontend/components/ConnectWallet.js b/frontend/components/ConnectWallet.js new file mode 100644 index 0000000000..e68eaf799b --- /dev/null +++ b/frontend/components/ConnectWallet.js @@ -0,0 +1,143 @@ +import React from 'react'; + +import { makeStyles } from '@mui/styles'; + +import Button from '@mui/material/Button'; +import ClickAwayListener from '@mui/material/ClickAwayListener'; +import Grow from '@mui/material/Grow'; +import Paper from '@mui/material/Paper'; +import Popper from '@mui/material/Popper'; +import MenuItem from '@mui/material/MenuItem'; +import MenuList from '@mui/material/MenuList'; +import ListItemText from '@mui/material/ListItemText'; +import PersonIcon from '@mui/icons-material/Person'; + +import { useEtherBalance, useTokenBalance, useEthers } from '@usedapp/core'; +import { formatEther } from '@ethersproject/units'; + +const useStyles = makeStyles((theme) => ({ + buttons: { + margin: theme.spacing(1), + padding: theme.spacing(1), + }, + profile: { + margin: theme.spacing(1), + padding: theme.spacing(1), + }, +})); + +const BBG = '0x3Afe98235d680e8d7A52e1458a59D60f45F935C0'; + +export default function ConnectWallet() { + const classes = useStyles(); + + const { activateBrowserWallet, account } = useEthers(); + const etherBalance = useEtherBalance(account); + const tokenBalance = useTokenBalance(BBG, account); + + const [open, setOpen] = React.useState(false); + const anchorRef = React.useRef(null); + + const handleToggle = () => { + setOpen((prevOpen) => !prevOpen); + }; + + const handleClose = (event) => { + if (anchorRef.current && anchorRef.current.contains(event.target)) { + return; + } + + setOpen(false); + }; + + function handleListKeyDown(event) { + if (event.key === 'Tab') { + event.preventDefault(); + setOpen(false); + } else if (event.key === 'Escape') { + setOpen(false); + } + } + + // return focus to the button when we transitioned from !open -> open + const prevOpen = React.useRef(open); + React.useEffect(() => { + if (prevOpen.current === true && open === false) { + anchorRef.current.focus(); + } + + prevOpen.current = open; + }, [open]); + + return ( + <> + {account ? ( + <> + + + {({ TransitionProps, placement }) => ( + + + + + + {account &&

Account: {account}

} +
+ + {etherBalance && ( + ETH Balance: {formatEther(etherBalance)} + )} + + + {tokenBalance && ( + BBG Balance: {formatEther(tokenBalance)} + )} + +
+
+
+
+ )} +
+ + ) : ( +
+ +
+ )} + + ); +} diff --git a/frontend/components/Detail.tsx b/frontend/components/Detail.tsx new file mode 100644 index 0000000000..79f4ed739a --- /dev/null +++ b/frontend/components/Detail.tsx @@ -0,0 +1,56 @@ +import { styled } from '@mui/styles'; +import type { GridStrategy } from '../api/bbgo'; + +import RunningTime from './RunningTime'; +import Summary from './Summary'; +import Stats from './Stats'; + +const StrategyContainer = styled('section')(() => ({ + display: 'flex', + flexDirection: 'column', + justifyContent: 'space-around', + width: '350px', + border: '1px solid rgb(248, 149, 35)', + borderRadius: '10px', + padding: '10px', +})); + +const Strategy = styled('div')(() => ({ + fontSize: '20px', +})); + +export const Description = styled('div')(() => ({ + color: 'rgb(140, 140, 140)', + '& .duration': { + marginLeft: '3px', + }, +})); + +export default function Detail({ data }: { data: GridStrategy }) { + const { strategy, stats, startTime } = data; + const totalProfitsPercentage = (stats.totalProfits / stats.investment) * 100; + const gridProfitsPercentage = (stats.gridProfits / stats.investment) * 100; + const gridAprPercentage = (stats.gridProfits / 5) * 365; + + const now = Date.now(); + const durationMilliseconds = now - startTime; + const seconds = durationMilliseconds / 1000; + + return ( + + {strategy} +
{data[strategy].symbol}
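A quick worked example of the percentage math above, using the mock GridStats returned by queryStrategiesMetrics in api/bbgo (investment 100, totalProfits 5.6, gridProfits 2.5). Reading the hard-coded 5 in gridAprPercentage as "days running" is an assumption, not something this code states.

// With the mock GridStats from queryStrategiesMetrics:
//   totalProfitsPercentage = (5.6 / 100) * 100 = 5.6   (%)
//   gridProfitsPercentage  = (2.5 / 100) * 100 = 2.5   (%)
//   gridAprPercentage      = (2.5 / 5) * 365   = 182.5 (%)  // assuming the 5 stands for days elapsed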
+ + + 0 arbitrages in 24 hours / Total {stats.totalArbs}{' '} + arbitrages + + + + + ); +} diff --git a/frontend/components/ExchangeSessionTabPanel.js b/frontend/components/ExchangeSessionTabPanel.js new file mode 100644 index 0000000000..fb76f447c6 --- /dev/null +++ b/frontend/components/ExchangeSessionTabPanel.js @@ -0,0 +1,49 @@ +import Paper from '@mui/material/Paper'; +import Tabs from '@mui/material/Tabs'; +import Tab from '@mui/material/Tab'; +import React, { useEffect, useState } from 'react'; +import { querySessions } from '../api/bbgo'; +import Typography from '@mui/material/Typography'; +import { makeStyles } from '@mui/styles'; + +const useStyles = makeStyles((theme) => ({ + paper: { + margin: theme.spacing(2), + padding: theme.spacing(2), + }, +})); + +export default function ExchangeSessionTabPanel() { + const classes = useStyles(); + + const [tabIndex, setTabIndex] = React.useState(0); + const handleTabClick = (event, newValue) => { + setTabIndex(newValue); + }; + + const [sessions, setSessions] = useState([]); + + useEffect(() => { + querySessions((sessions) => { + setSessions(sessions); + }); + }, []); + + return ( + + + Sessions + + + {sessions.map((session) => { + return ; + })} + + + ); +} diff --git a/frontend/components/ReviewSessions.js b/frontend/components/ReviewSessions.js new file mode 100644 index 0000000000..6eb49c7318 --- /dev/null +++ b/frontend/components/ReviewSessions.js @@ -0,0 +1,88 @@ +import React from 'react'; +import Grid from '@mui/material/Grid'; +import Button from '@mui/material/Button'; +import Typography from '@mui/material/Typography'; +import List from '@mui/material/List'; +import ListItem from '@mui/material/ListItem'; +import ListItemText from '@mui/material/ListItemText'; +import ListItemIcon from '@mui/material/ListItemIcon'; +import PowerIcon from '@mui/icons-material/Power'; + +import { makeStyles } from '@mui/styles'; +import { querySessions } from '../api/bbgo'; + +const useStyles = makeStyles((theme) => ({ + formControl: { + marginTop: theme.spacing(1), + marginBottom: theme.spacing(1), + minWidth: 120, + }, + buttons: { + display: 'flex', + justifyContent: 'flex-end', + marginTop: theme.spacing(2), + paddingTop: theme.spacing(2), + paddingBottom: theme.spacing(2), + '& > *': { + marginLeft: theme.spacing(1), + }, + }, +})); + +export default function ReviewSessions({ onBack, onNext }) { + const classes = useStyles(); + + const [sessions, setSessions] = React.useState([]); + + React.useEffect(() => { + querySessions((sessions) => { + setSessions(sessions); + }); + }, []); + + const items = sessions.map((session, i) => { + console.log(session); + return ( + + + + + + + ); + }); + + return ( + + + Review Sessions + + + {items} + +
+ + + +
+
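Usage note for the callback-style helpers imported from ../api/bbgo: each one fetches a list and invokes the callback with an array (falling back to an empty array). A minimal sketch, assuming a session object exposes at least a name field and using a hypothetical session name:

import { querySessions, querySessionSymbols } from '../api/bbgo';

// List configured sessions; each entry has at least a `name`.
querySessions((sessions) => {
  sessions.forEach((session) => console.log(session.name));
});

// List tradable symbols for one session ('max' is a hypothetical session name).
querySessionSymbols('max', (symbols) => {
  console.log(symbols);
});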
+ ); +} diff --git a/frontend/components/ReviewStrategies.js b/frontend/components/ReviewStrategies.js new file mode 100644 index 0000000000..085fabefc8 --- /dev/null +++ b/frontend/components/ReviewStrategies.js @@ -0,0 +1,157 @@ +import React from 'react'; +import Button from '@mui/material/Button'; +import Typography from '@mui/material/Typography'; +import List from '@mui/material/List'; +import Card from '@mui/material/Card'; +import CardHeader from '@mui/material/CardHeader'; +import CardContent from '@mui/material/CardContent'; +import Avatar from '@mui/material/Avatar'; +import IconButton from '@mui/material/IconButton'; +import MoreVertIcon from '@mui/icons-material/MoreVert'; +import Table from '@mui/material/Table'; +import TableBody from '@mui/material/TableBody'; +import TableCell from '@mui/material/TableCell'; +import TableContainer from '@mui/material/TableContainer'; +import TableHead from '@mui/material/TableHead'; +import TableRow from '@mui/material/TableRow'; + +import { makeStyles } from '@mui/styles'; +import { queryStrategies } from '../api/bbgo'; + +const useStyles = makeStyles((theme) => ({ + strategyCard: { + margin: theme.spacing(1), + }, + formControl: { + marginTop: theme.spacing(1), + marginBottom: theme.spacing(1), + minWidth: 120, + }, + buttons: { + display: 'flex', + justifyContent: 'flex-end', + marginTop: theme.spacing(2), + paddingTop: theme.spacing(2), + paddingBottom: theme.spacing(2), + '& > *': { + marginLeft: theme.spacing(1), + }, + }, +})); + +function configToTable(config) { + const rows = Object.getOwnPropertyNames(config).map((k) => { + return { + key: k, + val: config[k], + }; + }); + + return ( + + + + + Field + Value + + + + {rows.map((row) => ( + + + {row.key} + + {row.val} + + ))} + +
+
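To make configToTable's behavior concrete, a small worked example with a hypothetical strategy config:

// configToTable({ symbol: 'BTCUSDT', gridNumber: 20 }) renders a two-column table:
//
//   Field        Value
//   symbol       BTCUSDT
//   gridNumber   20
//
// i.e. one row per own property, with the key in the first cell and the value in the second.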
+ ); +} + +export default function ReviewStrategies({ onBack, onNext }) { + const classes = useStyles(); + + const [strategies, setStrategies] = React.useState([]); + + React.useEffect(() => { + queryStrategies((strategies) => { + setStrategies(strategies || []); + }).catch((err) => { + console.error(err); + }); + }, []); + + const items = strategies.map((o, i) => { + const mounts = o.on || []; + delete o.on; + + const config = o[o.strategy]; + + const titleComps = [o.strategy.toUpperCase()]; + if (config.symbol) { + titleComps.push(config.symbol); + } + + const title = titleComps.join(' '); + + return ( + + G} + action={ + + + + } + title={title} + subheader={`Exchange ${mounts.map((m) => m.toUpperCase())}`} + /> + + + Strategy will be executed on session {mounts.join(',')} with the + following configuration: + + + {configToTable(config)} + + + ); + }); + + return ( + + + Review Strategies + + + {items} + +
+ + + +
+
+ ); +} diff --git a/frontend/components/RunningTime.tsx b/frontend/components/RunningTime.tsx new file mode 100644 index 0000000000..07e21c1d6d --- /dev/null +++ b/frontend/components/RunningTime.tsx @@ -0,0 +1,34 @@ +import { styled } from '@mui/styles'; +import { Description } from './Detail'; + +const RunningTimeSection = styled('div')(() => ({ + display: 'flex', + alignItems: 'center', +})); + +const StatusSign = styled('span')(() => ({ + width: '10px', + height: '10px', + display: 'block', + backgroundColor: 'rgb(113, 218, 113)', + borderRadius: '50%', + marginRight: '5px', +})); + +export default function RunningTime({ seconds }: { seconds: number }) { + const day = Math.floor(seconds / (60 * 60 * 24)); + const hour = Math.floor((seconds % (60 * 60 * 24)) / 3600); + const min = Math.floor(((seconds % (60 * 60 * 24)) % 3600) / 60); + + return ( + + + + Running for + {day}D + {hour}H + {min}M + + + ); +} diff --git a/frontend/components/SaveConfigAndRestart.js b/frontend/components/SaveConfigAndRestart.js new file mode 100644 index 0000000000..86350ab564 --- /dev/null +++ b/frontend/components/SaveConfigAndRestart.js @@ -0,0 +1,105 @@ +import React from 'react'; +import { useRouter } from 'next/router'; + +import Button from '@mui/material/Button'; +import Typography from '@mui/material/Typography'; + +import { makeStyles } from '@mui/styles'; + +import { ping, saveConfig, setupRestart } from '../api/bbgo'; +import Box from '@mui/material/Box'; +import Alert from '@mui/lab/Alert'; + +const useStyles = makeStyles((theme) => ({ + strategyCard: { + margin: theme.spacing(1), + }, + formControl: { + marginTop: theme.spacing(1), + marginBottom: theme.spacing(1), + minWidth: 120, + }, + buttons: { + display: 'flex', + justifyContent: 'flex-end', + marginTop: theme.spacing(2), + paddingTop: theme.spacing(2), + paddingBottom: theme.spacing(2), + '& > *': { + marginLeft: theme.spacing(1), + }, + }, +})); + +export default function SaveConfigAndRestart({ onBack, onRestarted }) { + const classes = useStyles(); + + const { push } = useRouter(); + const [response, setResponse] = React.useState({}); + + const handleRestart = () => { + saveConfig((resp) => { + setResponse(resp); + + setupRestart((resp) => { + let t; + t = setInterval(() => { + ping(() => { + clearInterval(t); + push('/'); + }); + }, 1000); + }).catch((err) => { + console.error(err); + setResponse(err.response.data); + }); + + // call restart here + }).catch((err) => { + console.error(err); + setResponse(err.response.data); + }); + }; + + return ( + + + Save Config and Restart + + + + Click "Save and Restart" to save the configurations to the config file{' '} + bbgo.yaml, and save the exchange session credentials to the + dotenv file .env.local. + + +
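The wait-for-restart flow in handleRestart above boils down to: persist the config, ask the server to restart, then poll the ping endpoint once a second and navigate home as soon as it answers. A condensed sketch of that same logic (push comes from next/router's useRouter, as in the component; error handling omitted):

// Condensed view of handleRestart above (illustrative; error handling omitted).
saveConfig(() => {
  setupRestart(() => {
    const timer = setInterval(() => {
      ping(() => {
        clearInterval(timer); // the server answered, so the restart finished
        push('/');            // leave the setup wizard
      });
    }, 1000);
  });
});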
+ + + +
+ + {response ? ( + response.error ? ( + + {response.error} + + ) : response.success ? ( + + Config Saved + + ) : null + ) : null} +
+ ); +} diff --git a/frontend/components/SideBar.js b/frontend/components/SideBar.js new file mode 100644 index 0000000000..e491099650 --- /dev/null +++ b/frontend/components/SideBar.js @@ -0,0 +1,103 @@ +import Drawer from '@mui/material/Drawer'; +import Divider from '@mui/material/Divider'; +import List from '@mui/material/List'; +import Link from 'next/link'; +import ListItem from '@mui/material/ListItem'; +import ListItemIcon from '@mui/material/ListItemIcon'; +import DashboardIcon from '@mui/icons-material/Dashboard'; +import ListItemText from '@mui/material/ListItemText'; +import ListIcon from '@mui/icons-material/List'; +import TrendingUpIcon from '@mui/icons-material/TrendingUp'; +import React from 'react'; +import { makeStyles } from '@mui/styles'; + +const drawerWidth = 240; + +const useStyles = makeStyles((theme) => ({ + root: { + flexGrow: 1, + display: 'flex', + }, + toolbar: { + paddingRight: 24, // keep right padding when drawer closed + }, + toolbarIcon: { + display: 'flex', + alignItems: 'center', + justifyContent: 'flex-end', + padding: '0 8px', + ...theme.mixins.toolbar, + }, + appBarSpacer: theme.mixins.toolbar, + drawerPaper: { + [theme.breakpoints.up('sm')]: { + width: drawerWidth, + flexShrink: 0, + }, + position: 'relative', + whiteSpace: 'nowrap', + transition: theme.transitions.create('width', { + easing: theme.transitions.easing.sharp, + duration: theme.transitions.duration.enteringScreen, + }), + }, + drawer: { + width: drawerWidth, + }, +})); + +export default function SideBar() { + const classes = useStyles(); + + return ( + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} diff --git a/frontend/components/Stats.tsx b/frontend/components/Stats.tsx new file mode 100644 index 0000000000..bd34bde169 --- /dev/null +++ b/frontend/components/Stats.tsx @@ -0,0 +1,51 @@ +import { styled } from '@mui/styles'; +import { StatsTitle, StatsValue, Percentage } from './Summary'; +import { GridStats } from '../api/bbgo'; + +const StatsSection = styled('div')(() => ({ + display: 'grid', + gridTemplateColumns: '1fr 1fr 1fr', + gap: '10px', +})); + +export default function Stats({ + stats, + gridProfitsPercentage, + gridAprPercentage, +}: { + stats: GridStats; + gridProfitsPercentage: number; + gridAprPercentage: number; +}) { + return ( + +
+ Grid Profits + {stats.gridProfits} + {gridProfitsPercentage}% +
+ +
+ Floating PNL + {stats.floatingPNL} +
+ +
+ Grid APR + {gridAprPercentage}% +
+ +
+ Current Price +
{stats.currentPrice}
+
+ +
+ Price Range +
+ {stats.lowestPrice}~{stats.highestPrice} +
+
+
+ ); +} diff --git a/frontend/components/Summary.tsx b/frontend/components/Summary.tsx new file mode 100644 index 0000000000..a91435fab1 --- /dev/null +++ b/frontend/components/Summary.tsx @@ -0,0 +1,50 @@ +import { styled } from '@mui/styles'; +import { GridStats } from '../api/bbgo'; + +const SummarySection = styled('div')(() => ({ + width: '100%', + display: 'flex', + justifyContent: 'space-around', + backgroundColor: 'rgb(255, 245, 232)', + margin: '10px 0', +})); + +const SummaryBlock = styled('div')(() => ({ + padding: '5px 0 5px 0', +})); + +export const StatsTitle = styled('div')(() => ({ + margin: '0 0 10px 0', +})); + +export const StatsValue = styled('div')(() => ({ + marginBottom: '10px', + color: 'rgb(123, 169, 90)', +})); + +export const Percentage = styled('div')(() => ({ + color: 'rgb(123, 169, 90)', +})); + +export default function Summary({ + stats, + totalProfitsPercentage, +}: { + stats: GridStats; + totalProfitsPercentage: number; +}) { + return ( + + + Investment USDT +
{stats.investment}
+
+ + + Total Profit USDT + {stats.totalProfits} + {totalProfitsPercentage}% + +
+ ); +} diff --git a/frontend/components/SyncButton.tsx b/frontend/components/SyncButton.tsx new file mode 100644 index 0000000000..de00aacb52 --- /dev/null +++ b/frontend/components/SyncButton.tsx @@ -0,0 +1,39 @@ +import { styled } from '@mui/styles'; +import React, { useEffect, useState } from 'react'; +import { querySyncStatus, SyncStatus, triggerSync } from '../api/bbgo'; +import useInterval from '../hooks/useInterval'; + +const ToolbarButton = styled('button')(({ theme }) => ({ + padding: theme.spacing(1), +})); + +export default function SyncButton() { + const [syncing, setSyncing] = useState(false); + + const sync = async () => { + try { + setSyncing(true); + await triggerSync(); + } catch { + setSyncing(false); + } + }; + + useEffect(() => { + sync(); + }, []); + + useInterval(() => { + querySyncStatus().then((s) => { + if (s !== SyncStatus.Syncing) { + setSyncing(false); + } + }); + }, 2000); + + return ( + + {syncing ? 'Syncing...' : 'Sync'} + + ); +} diff --git a/frontend/components/TotalAssetsDetails.js b/frontend/components/TotalAssetsDetails.js new file mode 100644 index 0000000000..b587b72ffc --- /dev/null +++ b/frontend/components/TotalAssetsDetails.js @@ -0,0 +1,87 @@ +import React from 'react'; +import CardContent from '@mui/material/CardContent'; +import Card from '@mui/material/Card'; +import { makeStyles } from '@mui/styles'; +import List from '@mui/material/List'; +import ListItem from '@mui/material/ListItem'; +import ListItemText from '@mui/material/ListItemText'; +import ListItemAvatar from '@mui/material/ListItemAvatar'; +import Avatar from '@mui/material/Avatar'; + +const useStyles = makeStyles((theme) => ({ + root: { + margin: theme.spacing(1), + }, + cardContent: {}, +})); + +const logoCurrencies = { + BTC: true, + ETH: true, + BCH: true, + LTC: true, + USDT: true, + BNB: true, + COMP: true, + XRP: true, + LINK: true, + DOT: true, + SXP: true, + DAI: true, + MAX: true, + TWD: true, + SNT: true, + YFI: true, + GRT: true, +}; + +export default function TotalAssetsDetails({ assets }) { + const classes = useStyles(); + + const sortedAssets = []; + for (let k in assets) { + sortedAssets.push(assets[k]); + } + sortedAssets.sort((a, b) => { + if (a.inUSD > b.inUSD) { + return -1; + } + + if (a.inUSD < b.inUSD) { + return 1; + } + + return 0; + }); + + const items = sortedAssets.map((a) => { + return ( + + {a.currency in logoCurrencies ? 
( + + + + ) : ( + + + + )} + + + ); + }); + + return ( + + + {items} + + + ); +} diff --git a/frontend/components/TotalAssetsPie.js b/frontend/components/TotalAssetsPie.js new file mode 100644 index 0000000000..695b8daadd --- /dev/null +++ b/frontend/components/TotalAssetsPie.js @@ -0,0 +1,94 @@ +import React, { useEffect, useState } from 'react'; + +import { ResponsivePie } from '@nivo/pie'; +import { queryAssets } from '../api/bbgo'; +import { currencyColor } from '../src/utils'; +import CardContent from '@mui/material/CardContent'; +import Card from '@mui/material/Card'; +import { makeStyles } from '@mui/styles'; + +function reduceAssetsBy(assets, field, minimum) { + let as = []; + + let others = { id: 'others', labels: 'others', value: 0.0 }; + for (let key in assets) { + if (assets[key]) { + let a = assets[key]; + let value = a[field]; + + if (value < minimum) { + others.value += value; + } else { + as.push({ + id: a.currency, + label: a.currency, + color: currencyColor(a.currency), + value: Math.round(value, 1), + }); + } + } + } + + return as; +} + +const useStyles = makeStyles((theme) => ({ + root: { + margin: theme.spacing(1), + }, + cardContent: { + height: 350, + }, +})); + +export default function TotalAssetsPie({ assets }) { + const classes = useStyles(); + return ( + + + + + + ); +} diff --git a/frontend/components/TotalAssetsSummary.js b/frontend/components/TotalAssetsSummary.js new file mode 100644 index 0000000000..f0d4110299 --- /dev/null +++ b/frontend/components/TotalAssetsSummary.js @@ -0,0 +1,60 @@ +import { useEffect, useState } from 'react'; +import Card from '@mui/material/Card'; +import CardContent from '@mui/material/CardContent'; +import Typography from '@mui/material/Typography'; +import { makeStyles } from '@mui/styles'; + +function aggregateAssetsBy(assets, field) { + let total = 0.0; + for (let key in assets) { + if (assets[key]) { + let a = assets[key]; + let value = a[field]; + total += value; + } + } + + return total; +} + +const useStyles = makeStyles((theme) => ({ + root: { + margin: theme.spacing(1), + }, + title: { + fontSize: 14, + }, + pos: { + marginTop: 12, + }, +})); + +export default function TotalAssetSummary({ assets }) { + const classes = useStyles(); + return ( + + + + Total Account Balance + + + {Math.round(aggregateAssetsBy(assets, 'inBTC') * 1e8) / 1e8}{' '} + BTC + + + + Estimated Value + + + + {Math.round(aggregateAssetsBy(assets, 'inUSD') * 100) / 100}{' '} + USD + + + + ); +} diff --git a/frontend/components/TradingVolumeBar.js b/frontend/components/TradingVolumeBar.js new file mode 100644 index 0000000000..b9b0773b2c --- /dev/null +++ b/frontend/components/TradingVolumeBar.js @@ -0,0 +1,161 @@ +import { ResponsiveBar } from '@nivo/bar'; +import { queryTradingVolume } from '../api/bbgo'; +import { useEffect, useState } from 'react'; + +function toPeriodDateString(time, period) { + switch (period) { + case 'day': + return ( + time.getFullYear() + '-' + (time.getMonth() + 1) + '-' + time.getDate() + ); + case 'month': + return time.getFullYear() + '-' + (time.getMonth() + 1); + case 'year': + return time.getFullYear(); + } + + return ( + time.getFullYear() + '-' + (time.getMonth() + 1) + '-' + time.getDate() + ); +} + +function groupData(rows, period, segment) { + let dateIndex = {}; + let startTime = null; + let endTime = null; + let keys = {}; + + rows.forEach((v) => { + const time = new Date(v.time); + if (!startTime) { + startTime = time; + } + + endTime = time; + + const dateStr = toPeriodDateString(time, period); + const key = 
v[segment]; + + keys[key] = true; + + const k = key ? key : 'total'; + const quoteVolume = Math.round(v.quoteVolume * 100) / 100; + + if (dateIndex[dateStr]) { + dateIndex[dateStr][k] = quoteVolume; + } else { + dateIndex[dateStr] = { + date: dateStr, + year: time.getFullYear(), + month: time.getMonth() + 1, + day: time.getDate(), + [k]: quoteVolume, + }; + } + }); + + let data = []; + while (startTime < endTime) { + const dateStr = toPeriodDateString(startTime, period); + const groupData = dateIndex[dateStr]; + if (groupData) { + data.push(groupData); + } else { + data.push({ + date: dateStr, + year: startTime.getFullYear(), + month: startTime.getMonth() + 1, + day: startTime.getDate(), + total: 0, + }); + } + + switch (period) { + case 'day': + startTime.setDate(startTime.getDate() + 1); + break; + case 'month': + startTime.setMonth(startTime.getMonth() + 1); + break; + case 'year': + startTime.setFullYear(startTime.getFullYear() + 1); + break; + } + } + + return [data, Object.keys(keys)]; +} + +export default function TradingVolumeBar(props) { + const [tradingVolumes, setTradingVolumes] = useState([]); + const [period, setPeriod] = useState(props.period); + const [segment, setSegment] = useState(props.segment); + + useEffect(() => { + if (props.period !== period) { + setPeriod(props.period); + } + + if (props.segment !== segment) { + setSegment(props.segment); + } + + queryTradingVolume( + { period: props.period, segment: props.segment }, + (tradingVolumes) => { + setTradingVolumes(tradingVolumes); + } + ); + }, [props.period, props.segment]); + + const [data, keys] = groupData(tradingVolumes, period, segment); + + return ( + + ); +} diff --git a/frontend/components/TradingVolumePanel.js b/frontend/components/TradingVolumePanel.js new file mode 100644 index 0000000000..165ebccff1 --- /dev/null +++ b/frontend/components/TradingVolumePanel.js @@ -0,0 +1,72 @@ +import Paper from '@mui/material/Paper'; +import Box from '@mui/material/Box'; +import Tabs from '@mui/material/Tabs'; +import Tab from '@mui/material/Tab'; +import React from 'react'; +import TradingVolumeBar from './TradingVolumeBar'; +import { makeStyles } from '@mui/styles'; +import Grid from '@mui/material/Grid'; +import Typography from '@mui/material/Typography'; + +const useStyles = makeStyles((theme) => ({ + tradingVolumeBarBox: { + height: 400, + }, + paper: { + margin: theme.spacing(2), + padding: theme.spacing(2), + }, +})); + +export default function TradingVolumePanel() { + const [period, setPeriod] = React.useState('day'); + const [segment, setSegment] = React.useState('exchange'); + const classes = useStyles(); + const handlePeriodChange = (event, newValue) => { + setPeriod(newValue); + }; + + const handleSegmentChange = (event, newValue) => { + setSegment(newValue); + }; + + return ( + + + Trading Volume + + + + + + + + + + + + + + + + + + + + + + + + + ); +} diff --git a/frontend/hooks/useInterval.ts b/frontend/hooks/useInterval.ts new file mode 100644 index 0000000000..9a54d776a6 --- /dev/null +++ b/frontend/hooks/useInterval.ts @@ -0,0 +1,20 @@ +import { useEffect, useRef } from 'react'; + +export default function useInterval(cb: Function, delayMs: number | null) { + const savedCallback = useRef(); + + useEffect(() => { + savedCallback.current = cb; + }, [cb]); + + useEffect(() => { + function tick() { + savedCallback.current(); + } + + if (delayMs !== null) { + let timerId = setInterval(tick, delayMs); + return () => clearInterval(timerId); + } + }, [delayMs]); +} diff --git a/frontend/layouts/DashboardLayout.js 
b/frontend/layouts/DashboardLayout.js new file mode 100644 index 0000000000..1f9ffd8f0a --- /dev/null +++ b/frontend/layouts/DashboardLayout.js @@ -0,0 +1,65 @@ +import React from 'react'; + +import { makeStyles } from '@mui/styles'; +import AppBar from '@mui/material/AppBar'; +import Toolbar from '@mui/material/Toolbar'; +import Typography from '@mui/material/Typography'; +import Container from '@mui/material/Container'; + +import SideBar from '../components/SideBar'; +import SyncButton from '../components/SyncButton'; + +import ConnectWallet from '../components/ConnectWallet'; +import { Box } from '@mui/material'; + +const useStyles = makeStyles((theme) => ({ + root: { + flexGrow: 1, + display: 'flex', + }, + content: { + flexGrow: 1, + height: '100vh', + overflow: 'auto', + }, + appBar: { + zIndex: theme.zIndex.drawer + 1, + }, + appBarSpacer: theme.mixins.toolbar, + container: {}, + toolbar: { + justifyContent: 'space-between', + }, +})); + +export default function DashboardLayout({ children }) { + const classes = useStyles(); + + return ( +
+ + + + BBGO + + + + + + + + + +
+
+ + {children} + +
+
+ ); +} diff --git a/frontend/layouts/PlainLayout.js b/frontend/layouts/PlainLayout.js new file mode 100644 index 0000000000..8bbca9d773 --- /dev/null +++ b/frontend/layouts/PlainLayout.js @@ -0,0 +1,43 @@ +import React from 'react'; + +import { makeStyles } from '@mui/styles'; +import AppBar from '@mui/material/AppBar'; +import Toolbar from '@mui/material/Toolbar'; +import Typography from '@mui/material/Typography'; +import Container from '@mui/material/Container'; + +const useStyles = makeStyles((theme) => ({ + root: { + // flexGrow: 1, + display: 'flex', + }, + content: { + flexGrow: 1, + height: '100vh', + overflow: 'auto', + }, + appBar: { + zIndex: theme.zIndex.drawer + 1, + }, + appBarSpacer: theme.mixins.toolbar, +})); + +export default function PlainLayout(props) { + const classes = useStyles(); + return ( +
+ + + + {props && props.title ? props.title : 'BBGO Setup Wizard'} + + + + +
+
+ {props.children} +
+
+ ); +} diff --git a/frontend/next-env.d.ts b/frontend/next-env.d.ts new file mode 100644 index 0000000000..4f11a03dc6 --- /dev/null +++ b/frontend/next-env.d.ts @@ -0,0 +1,5 @@ +/// +/// + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/basic-features/typescript for more information. diff --git a/frontend/next.config.js b/frontend/next.config.js new file mode 100644 index 0000000000..c9fac5d498 --- /dev/null +++ b/frontend/next.config.js @@ -0,0 +1,9 @@ +module.exports = async (phase, { defaultConfig }) => { + /** + * @type {import('next').NextConfig} + */ + const nextConfig = { + /* config options here */ + } + return nextConfig +} \ No newline at end of file diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000000..0096838400 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,42 @@ +{ + "name": "frontend", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "yarn run next dev", + "build": "yarn run next build", + "start": "yarn run next start", + "export": "yarn run next build && yarn run next export", + "prettier": "prettier --write ." + }, + "dependencies": { + "@emotion/react": "^11.9.3", + "@emotion/styled": "^11.9.3", + "@ethersproject/units": "^5.6.1", + "@mui/icons-material": "^5.8.3", + "@mui/lab": "^5.0.0-alpha.85", + "@mui/material": "^5.8.3", + "@mui/styles": "^5.8.3", + "@mui/x-data-grid": "^5.12.1", + "@nivo/bar": "^0.79.1", + "@nivo/core": "^0.79.0", + "@nivo/pie": "^0.79.1", + "@usedapp/core": "1.0.9", + "axios": "^0.27.2", + "classnames": "^2.2.6", + "ethers": "^5.6.9", + "isomorphic-fetch": "^3.0.0", + "next": "12", + "qrcode.react": "^3.0.1", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "react-number-format": "^4.4.4" + }, + "devDependencies": { + "@types/node": "^18.0.0", + "@types/react": "^18.0.14", + "next-transpile-modules": "^9.0.0", + "prettier": "^2.6.2", + "typescript": "^4.1.3" + } +} diff --git a/frontend/pages/_app.tsx b/frontend/pages/_app.tsx new file mode 100644 index 0000000000..78178c8016 --- /dev/null +++ b/frontend/pages/_app.tsx @@ -0,0 +1,43 @@ +import React, { useEffect } from 'react'; +import PropTypes from 'prop-types'; +import Head from 'next/head'; + +import { ThemeProvider } from '@mui/material/styles'; + +import CssBaseline from '@mui/material/CssBaseline'; +import theme from '../src/theme'; +import '../styles/globals.css'; + +export default function MyApp(props) { + const { Component, pageProps } = props; + + useEffect(() => { + // Remove the server-side injected CSS. + const jssStyles = document.querySelector('#jss-server-side'); + if (jssStyles) { + jssStyles.parentElement.removeChild(jssStyles); + } + }, []); + + return ( + + + BBGO + + + + {/* CssBaseline kickstart an elegant, consistent, and simple baseline to build upon. */} + + + + + ); +} + +MyApp.propTypes = { + Component: PropTypes.elementType.isRequired, + pageProps: PropTypes.object.isRequired, +}; diff --git a/frontend/pages/_document.js b/frontend/pages/_document.js new file mode 100644 index 0000000000..72d1a1fdd5 --- /dev/null +++ b/frontend/pages/_document.js @@ -0,0 +1,72 @@ +/* eslint-disable react/jsx-filename-extension */ +import React from 'react'; +import Document, { Html, Head, Main, NextScript } from 'next/document'; +import { ServerStyleSheets } from '@mui/styles'; +import theme from '../src/theme'; + +export default class MyDocument extends Document { + render() { + return ( + + + {/* PWA primary color */} + + + + +
+ + + + ); + } +} + +// `getInitialProps` belongs to `_document` (instead of `_app`), +// it's compatible with server-side generation (SSG). +MyDocument.getInitialProps = async (ctx) => { + // Resolution order + // + // On the server: + // 1. app.getInitialProps + // 2. page.getInitialProps + // 3. document.getInitialProps + // 4. app.render + // 5. page.render + // 6. document.render + // + // On the server with error: + // 1. document.getInitialProps + // 2. app.render + // 3. page.render + // 4. document.render + // + // On the client + // 1. app.getInitialProps + // 2. page.getInitialProps + // 3. app.render + // 4. page.render + + // Render app and page and get the context of the page with collected side effects. + const sheets = new ServerStyleSheets(); + const originalRenderPage = ctx.renderPage; + + ctx.renderPage = () => + originalRenderPage({ + enhanceApp: (App) => (props) => sheets.collect(), + }); + + const initialProps = await Document.getInitialProps(ctx); + + return { + ...initialProps, + // Styles fragment is rendered after the app and page rendering finish. + styles: [ + ...React.Children.toArray(initialProps.styles), + sheets.getStyleElement(), + ], + }; +}; diff --git a/frontend/pages/api/hello.js b/frontend/pages/api/hello.js new file mode 100644 index 0000000000..07d9d9ba2b --- /dev/null +++ b/frontend/pages/api/hello.js @@ -0,0 +1,6 @@ +// Next.js API route support: https://nextjs.org/docs/api-routes/introduction + +export default (req, res) => { + res.statusCode = 200; + res.json({ name: 'John Doe' }); +}; diff --git a/frontend/pages/connect/index.js b/frontend/pages/connect/index.js new file mode 100644 index 0000000000..0e2eb47dc7 --- /dev/null +++ b/frontend/pages/connect/index.js @@ -0,0 +1,55 @@ +import React, { useEffect, useState } from 'react'; + +import { makeStyles } from '@mui/styles'; +import Typography from '@mui/material/Typography'; +import Paper from '@mui/material/Paper'; +import PlainLayout from '../../layouts/PlainLayout'; +import { QRCodeSVG } from 'qrcode.react'; +import { queryOutboundIP } from '../../api/bbgo'; + +const useStyles = makeStyles((theme) => ({ + paper: { + margin: theme.spacing(2), + padding: theme.spacing(2), + }, + dataGridContainer: { + display: 'flex', + textAlign: 'center', + alignItems: 'center', + alignContent: 'center', + height: 320, + }, +})); + +function fetchConnectUrl(cb) { + return queryOutboundIP((outboundIP) => { + cb( + window.location.protocol + '//' + outboundIP + ':' + window.location.port + ); + }); +} + +export default function Connect() { + const classes = useStyles(); + + const [connectUrl, setConnectUrl] = useState([]); + + useEffect(() => { + fetchConnectUrl(function (url) { + setConnectUrl(url); + }); + }, []); + + return ( + + + + Sign In Using QR Codes + +
+ +
+
+
+ ); +} diff --git a/frontend/pages/index.tsx b/frontend/pages/index.tsx new file mode 100644 index 0000000000..3185b8164d --- /dev/null +++ b/frontend/pages/index.tsx @@ -0,0 +1,121 @@ +import React, { useState } from 'react'; +import { useRouter } from 'next/router'; + +import { makeStyles } from '@mui/styles'; +import Typography from '@mui/material/Typography'; +import Box from '@mui/material/Box'; +import Grid from '@mui/material/Grid'; +import Paper from '@mui/material/Paper'; + +import TotalAssetsPie from '../components/TotalAssetsPie'; +import TotalAssetSummary from '../components/TotalAssetsSummary'; +import TotalAssetDetails from '../components/TotalAssetsDetails'; + +import TradingVolumePanel from '../components/TradingVolumePanel'; +import ExchangeSessionTabPanel from '../components/ExchangeSessionTabPanel'; + +import DashboardLayout from '../layouts/DashboardLayout'; + +import { queryAssets, querySessions } from '../api/bbgo'; + +import { ChainId, Config, DAppProvider } from '@usedapp/core'; +import { Theme } from '@mui/material/styles'; + +// fix the `theme.spacing` missing error +// https://stackoverflow.com/a/70707121/3897950 +declare module '@mui/styles/defaultTheme' { + // eslint-disable-next-line @typescript-eslint/no-empty-interface (remove this line if you don't have the rule enabled) + interface DefaultTheme extends Theme {} +} + +const useStyles = makeStyles((theme) => ({ + totalAssetsSummary: { + margin: theme.spacing(2), + padding: theme.spacing(2), + }, + grid: { + flexGrow: 1, + }, + control: { + padding: theme.spacing(2), + }, +})); + +const config: Config = { + readOnlyChainId: ChainId.Mainnet, + readOnlyUrls: { + [ChainId.Mainnet]: + 'https://mainnet.infura.io/v3/9aa3d95b3bc440fa88ea12eaa4456161', + }, +}; + +// props are pageProps passed from _app.tsx +export default function Home() { + const classes = useStyles(); + const router = useRouter(); + + const [assets, setAssets] = useState({}); + const [sessions, setSessions] = React.useState([]); + + React.useEffect(() => { + querySessions((sessions) => { + if (sessions && sessions.length > 0) { + setSessions(sessions); + queryAssets(setAssets); + } else { + router.push('/setup'); + } + }).catch((err) => { + console.error(err); + }); + }, [router]); + + if (sessions.length == 0) { + return ( + + + + Loading + + + + ); + } + + console.log('index: assets', assets); + + return ( + + + + + Total Assets + + +
+ + + + + + + + + + +
+
+ + + + +
+
+ ); +} diff --git a/frontend/pages/orders.js b/frontend/pages/orders.js new file mode 100644 index 0000000000..744b67e486 --- /dev/null +++ b/frontend/pages/orders.js @@ -0,0 +1,81 @@ +import React, { useEffect, useState } from 'react'; + +import { makeStyles } from '@mui/styles'; +import Typography from '@mui/material/Typography'; +import Paper from '@mui/material/Paper'; +import { queryClosedOrders } from '../api/bbgo'; +import { DataGrid } from '@mui/x-data-grid'; +import DashboardLayout from '../layouts/DashboardLayout'; + +const columns = [ + { field: 'gid', headerName: 'GID', width: 80, type: 'number' }, + { field: 'clientOrderID', headerName: 'Client Order ID', width: 130 }, + { field: 'exchange', headerName: 'Exchange' }, + { field: 'symbol', headerName: 'Symbol' }, + { field: 'orderType', headerName: 'Type' }, + { field: 'side', headerName: 'Side', width: 90 }, + { + field: 'averagePrice', + headerName: 'Average Price', + type: 'number', + width: 120, + }, + { field: 'quantity', headerName: 'Quantity', type: 'number' }, + { + field: 'executedQuantity', + headerName: 'Executed Quantity', + type: 'number', + }, + { field: 'status', headerName: 'Status' }, + { field: 'isMargin', headerName: 'Margin' }, + { field: 'isIsolated', headerName: 'Isolated' }, + { field: 'creationTime', headerName: 'Create Time', width: 200 }, +]; + +const useStyles = makeStyles((theme) => ({ + paper: { + margin: theme.spacing(2), + padding: theme.spacing(2), + }, + dataGridContainer: { + display: 'flex', + height: 'calc(100vh - 64px - 120px)', + }, +})); + +export default function Orders() { + const classes = useStyles(); + + const [orders, setOrders] = useState([]); + + useEffect(() => { + queryClosedOrders({}, (orders) => { + setOrders( + orders.map((o) => { + o.id = o.gid; + return o; + }) + ); + }); + }, []); + + return ( + + + + Orders + +
+
+ +
+
+
+
+ ); +} diff --git a/frontend/pages/setup/index.js b/frontend/pages/setup/index.js new file mode 100644 index 0000000000..664e479805 --- /dev/null +++ b/frontend/pages/setup/index.js @@ -0,0 +1,132 @@ +import React from 'react'; + +import { makeStyles } from '@mui/styles'; +import Typography from '@mui/material/Typography'; +import Box from '@mui/material/Box'; +import Paper from '@mui/material/Paper'; +import Stepper from '@mui/material/Stepper'; +import Step from '@mui/material/Step'; +import StepLabel from '@mui/material/StepLabel'; + +import ConfigureDatabaseForm from '../../components/ConfigureDatabaseForm'; +import AddExchangeSessionForm from '../../components/AddExchangeSessionForm'; +import ReviewSessions from '../../components/ReviewSessions'; +import ConfigureGridStrategyForm from '../../components/ConfigureGridStrategyForm'; +import ReviewStrategies from '../../components/ReviewStrategies'; +import SaveConfigAndRestart from '../../components/SaveConfigAndRestart'; + +import PlainLayout from '../../layouts/PlainLayout'; + +const useStyles = makeStyles((theme) => ({ + paper: { + padding: theme.spacing(2), + }, +})); + +const steps = [ + 'Configure Database', + 'Add Exchange Session', + 'Review Sessions', + 'Configure Strategy', + 'Review Strategies', + 'Save Config and Restart', +]; + +function getStepContent(step, setActiveStep) { + switch (step) { + case 0: + return ( + { + setActiveStep(1); + }} + /> + ); + case 1: + return ( + { + setActiveStep(0); + }} + onAdded={() => { + setActiveStep(2); + }} + /> + ); + case 2: + return ( + { + setActiveStep(1); + }} + onNext={() => { + setActiveStep(3); + }} + /> + ); + case 3: + return ( + { + setActiveStep(2); + }} + onAdded={() => { + setActiveStep(4); + }} + /> + ); + case 4: + return ( + { + setActiveStep(3); + }} + onNext={() => { + setActiveStep(5); + }} + /> + ); + + case 5: + return ( + { + setActiveStep(4); + }} + onRestarted={() => {}} + /> + ); + + default: + throw new Error('Unknown step'); + } +} + +export default function Setup() { + const classes = useStyles(); + const [activeStep, setActiveStep] = React.useState(0); + + return ( + + + + + Setup Session + + + + {steps.map((label) => ( + + {label} + + ))} + + + + {getStepContent(activeStep, setActiveStep)} + + + + + ); +} diff --git a/frontend/pages/strategies.tsx b/frontend/pages/strategies.tsx new file mode 100644 index 0000000000..1ff03af142 --- /dev/null +++ b/frontend/pages/strategies.tsx @@ -0,0 +1,43 @@ +import { styled } from '@mui/styles'; +import DashboardLayout from '../layouts/DashboardLayout'; +import { useEffect, useState } from 'react'; +import { queryStrategiesMetrics } from '../api/bbgo'; +import type { GridStrategy } from '../api/bbgo'; + +import Detail from '../components/Detail'; + +const StrategiesContainer = styled('div')(() => ({ + width: '100%', + height: '100%', + padding: '40px 20px', + display: 'grid', + gridTemplateColumns: 'repeat(3, 350px);', + justifyContent: 'center', + gap: '30px', + '@media(max-width: 1400px)': { + gridTemplateColumns: 'repeat(2, 350px)', + }, + '@media(max-width: 1000px)': { + gridTemplateColumns: '350px', + }, +})); + +export default function Strategies() { + const [details, setDetails] = useState([]); + + useEffect(() => { + queryStrategiesMetrics().then((value) => { + setDetails(value); + }); + }, []); + + return ( + + + {details.map((element) => { + return ; + })} + + + ); +} diff --git a/frontend/pages/trades.js b/frontend/pages/trades.js new file mode 100644 index 0000000000..d300f95de4 --- /dev/null +++ 
b/frontend/pages/trades.js @@ -0,0 +1,68 @@ +import React, { useEffect, useState } from 'react'; + +import { makeStyles } from '@mui/styles'; +import Typography from '@mui/material/Typography'; +import Paper from '@mui/material/Paper'; +import { queryTrades } from '../api/bbgo'; +import { DataGrid } from '@mui/x-data-grid'; +import DashboardLayout from '../layouts/DashboardLayout'; + +const columns = [ + { field: 'gid', headerName: 'GID', width: 80, type: 'number' }, + { field: 'exchange', headerName: 'Exchange' }, + { field: 'symbol', headerName: 'Symbol' }, + { field: 'side', headerName: 'Side', width: 90 }, + { field: 'price', headerName: 'Price', type: 'number', width: 120 }, + { field: 'quantity', headerName: 'Quantity', type: 'number' }, + { field: 'isMargin', headerName: 'Margin' }, + { field: 'isIsolated', headerName: 'Isolated' }, + { field: 'tradedAt', headerName: 'Trade Time', width: 200 }, +]; + +const useStyles = makeStyles((theme) => ({ + paper: { + margin: theme.spacing(2), + padding: theme.spacing(2), + }, + dataGridContainer: { + display: 'flex', + height: 'calc(100vh - 64px - 120px)', + }, +})); + +export default function Trades() { + const classes = useStyles(); + + const [trades, setTrades] = useState([]); + + useEffect(() => { + queryTrades({}, (trades) => { + setTrades( + trades.map((o) => { + o.id = o.gid; + return o; + }) + ); + }); + }, []); + + return ( + + + + Trades + +
+
+ +
+
+
+
+ ); +} diff --git a/frontend/public/favicon.ico b/frontend/public/favicon.ico new file mode 100644 index 0000000000..4965832f2c Binary files /dev/null and b/frontend/public/favicon.ico differ diff --git a/frontend/public/images/bch-logo.svg b/frontend/public/images/bch-logo.svg new file mode 100644 index 0000000000..127da329ce --- /dev/null +++ b/frontend/public/images/bch-logo.svg @@ -0,0 +1,18 @@ + + + + + + + + + diff --git a/frontend/public/images/bnb-logo.svg b/frontend/public/images/bnb-logo.svg new file mode 100644 index 0000000000..91a66e05f6 --- /dev/null +++ b/frontend/public/images/bnb-logo.svg @@ -0,0 +1 @@ +bi \ No newline at end of file diff --git a/frontend/public/images/btc-logo.svg b/frontend/public/images/btc-logo.svg new file mode 100644 index 0000000000..2b75c99bc2 --- /dev/null +++ b/frontend/public/images/btc-logo.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + diff --git a/frontend/public/images/comp-logo.svg b/frontend/public/images/comp-logo.svg new file mode 100644 index 0000000000..0face3c9d0 --- /dev/null +++ b/frontend/public/images/comp-logo.svg @@ -0,0 +1,16 @@ + + + + + + + diff --git a/frontend/public/images/dai-logo.svg b/frontend/public/images/dai-logo.svg new file mode 100644 index 0000000000..2ae2e32ff0 --- /dev/null +++ b/frontend/public/images/dai-logo.svg @@ -0,0 +1,13 @@ + + + + + + + + + + diff --git a/frontend/public/images/dot-logo.svg b/frontend/public/images/dot-logo.svg new file mode 100644 index 0000000000..89d0e71a0d --- /dev/null +++ b/frontend/public/images/dot-logo.svg @@ -0,0 +1,20 @@ + + + + +polkadot + + + diff --git a/frontend/public/images/eth-logo.svg b/frontend/public/images/eth-logo.svg new file mode 100644 index 0000000000..684e968735 --- /dev/null +++ b/frontend/public/images/eth-logo.svg @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + diff --git a/frontend/public/images/grt-logo.svg b/frontend/public/images/grt-logo.svg new file mode 100644 index 0000000000..a3db5b7b43 --- /dev/null +++ b/frontend/public/images/grt-logo.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + diff --git a/frontend/public/images/link-logo.svg b/frontend/public/images/link-logo.svg new file mode 100644 index 0000000000..bf4cd5374e --- /dev/null +++ b/frontend/public/images/link-logo.svg @@ -0,0 +1 @@ +Asset 1 \ No newline at end of file diff --git a/frontend/public/images/ltc-logo.svg b/frontend/public/images/ltc-logo.svg new file mode 100644 index 0000000000..13e76a40ee --- /dev/null +++ b/frontend/public/images/ltc-logo.svg @@ -0,0 +1 @@ +litecoin-ltc-logo \ No newline at end of file diff --git a/frontend/public/images/max-logo.svg b/frontend/public/images/max-logo.svg new file mode 100644 index 0000000000..fae1cdfc4c --- /dev/null +++ b/frontend/public/images/max-logo.svg @@ -0,0 +1 @@ + diff --git a/frontend/public/images/snt-logo.svg b/frontend/public/images/snt-logo.svg new file mode 100644 index 0000000000..f7bd0b7358 --- /dev/null +++ b/frontend/public/images/snt-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/images/sxp-logo.svg b/frontend/public/images/sxp-logo.svg new file mode 100644 index 0000000000..27a4983498 --- /dev/null +++ b/frontend/public/images/sxp-logo.svg @@ -0,0 +1 @@ +Asset 2 \ No newline at end of file diff --git a/frontend/public/images/twd-logo.svg b/frontend/public/images/twd-logo.svg new file mode 100644 index 0000000000..c1515e791d --- /dev/null +++ b/frontend/public/images/twd-logo.svg @@ -0,0 +1 @@ + diff --git a/frontend/public/images/usdt-logo.svg b/frontend/public/images/usdt-logo.svg 
new file mode 100644 index 0000000000..e530822408 --- /dev/null +++ b/frontend/public/images/usdt-logo.svg @@ -0,0 +1 @@ +tether-usdt-logo \ No newline at end of file diff --git a/frontend/public/images/xrp-logo.svg b/frontend/public/images/xrp-logo.svg new file mode 100644 index 0000000000..9a2c7c6321 --- /dev/null +++ b/frontend/public/images/xrp-logo.svg @@ -0,0 +1 @@ +x \ No newline at end of file diff --git a/frontend/public/images/yfi-logo.svg b/frontend/public/images/yfi-logo.svg new file mode 100644 index 0000000000..9b4b0607ce --- /dev/null +++ b/frontend/public/images/yfi-logo.svg @@ -0,0 +1 @@ +yearn-finance-yfi diff --git a/frontend/public/vercel.svg b/frontend/public/vercel.svg new file mode 100644 index 0000000000..fbf0e25a65 --- /dev/null +++ b/frontend/public/vercel.svg @@ -0,0 +1,4 @@ + + + \ No newline at end of file diff --git a/frontend/src/theme.js b/frontend/src/theme.js new file mode 100644 index 0000000000..ec5e14d587 --- /dev/null +++ b/frontend/src/theme.js @@ -0,0 +1,24 @@ +import { createTheme } from '@mui/material/styles'; +import { red } from '@mui/material/colors'; + +// Create a theme instance. +const theme = createTheme({ + palette: { + primary: { + main: '#eb9534', + contrastText: '#ffffff', + }, + secondary: { + main: '#ccc0b1', + contrastText: '#eb9534', + }, + error: { + main: red.A400, + }, + background: { + default: '#fff', + }, + }, +}); + +export default theme; diff --git a/frontend/src/utils.js b/frontend/src/utils.js new file mode 100644 index 0000000000..54b09ce7a3 --- /dev/null +++ b/frontend/src/utils.js @@ -0,0 +1,37 @@ +export function currencyColor(currency) { + switch (currency) { + case 'BTC': + return '#f69c3d'; + case 'ETH': + return '#497493'; + case 'MCO': + return '#032144'; + case 'OMG': + return '#2159ec'; + case 'LTC': + return '#949494'; + case 'USDT': + return '#2ea07b'; + case 'SAND': + return '#2E9AD0'; + case 'XRP': + return '#00AAE4'; + case 'BCH': + return '#8DC351'; + case 'MAX': + return '#2D4692'; + case 'TWD': + return '#4A7DED'; + } +} + +export function throttle(fn, delayMillis) { + let permitted = true; + return function (...args) { + if (permitted) { + fn.apply(this, args); + permitted = false; + setTimeout(() => (permitted = true), delayMillis); + } + }; +} diff --git a/frontend/styles/Home.module.css b/frontend/styles/Home.module.css new file mode 100644 index 0000000000..42e7e60094 --- /dev/null +++ b/frontend/styles/Home.module.css @@ -0,0 +1,122 @@ +.container { + min-height: 100vh; + padding: 0 0.5rem; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; +} + +.main { + padding: 5rem 0; + flex: 1; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; +} + +.footer { + width: 100%; + height: 100px; + border-top: 1px solid #eaeaea; + display: flex; + justify-content: center; + align-items: center; +} + +.footer img { + margin-left: 0.5rem; +} + +.footer a { + display: flex; + justify-content: center; + align-items: center; +} + +.title a { + color: #0070f3; + text-decoration: none; +} + +.title a:hover, +.title a:focus, +.title a:active { + text-decoration: underline; +} + +.title { + margin: 0; + line-height: 1.15; + font-size: 4rem; +} + +.title, +.description { + text-align: center; +} + +.description { + line-height: 1.5; + font-size: 1.5rem; +} + +.code { + background: #fafafa; + border-radius: 5px; + padding: 0.75rem; + font-size: 1.1rem; + font-family: Menlo, Monaco, Lucida Console, Liberation Mono, DejaVu Sans Mono, + Bitstream Vera
Sans Mono, Courier New, monospace; +} + +.grid { + display: flex; + align-items: center; + justify-content: center; + flex-wrap: wrap; + max-width: 800px; + margin-top: 3rem; +} + +.card { + margin: 1rem; + flex-basis: 45%; + padding: 1.5rem; + text-align: left; + color: inherit; + text-decoration: none; + border: 1px solid #eaeaea; + border-radius: 10px; + transition: color 0.15s ease, border-color 0.15s ease; +} + +.card:hover, +.card:focus, +.card:active { + color: #0070f3; + border-color: #0070f3; +} + +.card h3 { + margin: 0 0 1rem 0; + font-size: 1.5rem; +} + +.card p { + margin: 0; + font-size: 1.25rem; + line-height: 1.5; +} + +.logo { + height: 1em; +} + +@media (max-width: 600px) { + .grid { + width: 100%; + flex-direction: column; + } +} diff --git a/frontend/styles/globals.css b/frontend/styles/globals.css new file mode 100644 index 0000000000..e5e2dcc23b --- /dev/null +++ b/frontend/styles/globals.css @@ -0,0 +1,16 @@ +html, +body { + padding: 0; + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen, + Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif; +} + +a { + color: inherit; + text-decoration: none; +} + +* { + box-sizing: border-box; +} diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000000..5bee8c4d57 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,30 @@ +{ + "compilerOptions": { + "target": "es5", + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], + "allowJs": true, + "skipLibCheck": true, + "strict": false, + "forceConsistentCasingInFileNames": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "node", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true + }, + "include": [ + "next-env.d.ts", + "**/*.ts", + "**/*.tsx" + ], + "exclude": [ + "node_modules" + ] +} diff --git a/frontend/yarn.lock b/frontend/yarn.lock new file mode 100644 index 0000000000..8537ee4f1f --- /dev/null +++ b/frontend/yarn.lock @@ -0,0 +1,2136 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789" + integrity sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg== + dependencies: + "@babel/highlight" "^7.16.7" + +"@babel/compat-data@^7.17.10": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.18.5.tgz#acac0c839e317038c73137fbb6ef71a1d6238471" + integrity sha512-BxhE40PVCBxVEJsSBhB6UWyAuqJRxGsAw8BdHMJ3AKGydcwuWW4kOO3HmqBQAdcq/OP+/DlTVxLvsCzRTnZuGg== + +"@babel/core@^7.0.0": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.5.tgz#c597fa680e58d571c28dda9827669c78cdd7f000" + integrity sha512-MGY8vg3DxMnctw0LdvSEojOsumc70g0t18gNyUdAZqB1Rpd1Bqo/svHGvt+UJ6JcGX+DIekGFDxxIWofBxLCnQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.18.2" + "@babel/helper-compilation-targets" "^7.18.2" + "@babel/helper-module-transforms" "^7.18.0" + "@babel/helpers" "^7.18.2" + "@babel/parser" "^7.18.5" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.5" + "@babel/types" "^7.18.4" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/generator@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.2.tgz#33873d6f89b21efe2da63fe554460f3df1c5880d" + integrity sha512-W1lG5vUwFvfMd8HVXqdfbuG7RuaSrTCCD8cl8fP8wOivdbtbIg2Db3IWUcgvfxKbbn6ZBGYRW/Zk1MIwK49mgw== + dependencies: + "@babel/types" "^7.18.2" + "@jridgewell/gen-mapping" "^0.3.0" + jsesc "^2.5.1" + +"@babel/helper-compilation-targets@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.2.tgz#67a85a10cbd5fc7f1457fec2e7f45441dc6c754b" + integrity sha512-s1jnPotJS9uQnzFtiZVBUxe67CuBa679oWFHpxYYnTpRL/1ffhyX44R9uYiXoa/pLXcY9H2moJta0iaanlk/rQ== + dependencies: + "@babel/compat-data" "^7.17.10" + "@babel/helper-validator-option" "^7.16.7" + browserslist "^4.20.2" + semver "^6.3.0" + +"@babel/helper-environment-visitor@^7.16.7", "@babel/helper-environment-visitor@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.2.tgz#8a6d2dedb53f6bf248e31b4baf38739ee4a637bd" + integrity sha512-14GQKWkX9oJzPiQQ7/J36FTXcD4kSp8egKjO9nINlSKiHITRA9q/R74qu8S9xlc/b/yjsJItQUeeh3xnGN0voQ== + +"@babel/helper-function-name@^7.17.9": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz#136fcd54bc1da82fcb47565cf16fd8e444b1ff12" + integrity sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg== + dependencies: + "@babel/template" "^7.16.7" + "@babel/types" "^7.17.0" + +"@babel/helper-hoist-variables@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz#86bcb19a77a509c7b77d0e22323ef588fa58c246" + integrity sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz#25612a8091a999704461c8a222d0efec5d091437" + integrity sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-module-transforms@^7.18.0": + version "7.18.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.18.0.tgz#baf05dec7a5875fb9235bd34ca18bad4e21221cd" + integrity sha512-kclUYSUBIjlvnzN2++K9f2qzYKFgjmnmjwL4zlmU5f8ZtzgWe8s0rUPSTGy2HmK4P8T52MQsS+HTQAgZd3dMEA== + dependencies: + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-simple-access" "^7.17.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/helper-validator-identifier" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.0" + "@babel/types" "^7.18.0" + +"@babel/helper-plugin-utils@^7.17.12": + version "7.17.12" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.17.12.tgz#86c2347da5acbf5583ba0a10aed4c9bf9da9cf96" + integrity sha512-JDkf04mqtN3y4iAbO1hv9U2ARpPyPL1zqyWs/2WG1pgSq9llHFjStX5jdxb84himgJm+8Ng+x0oiWF/nw/XQKA== + +"@babel/helper-simple-access@^7.17.7": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.2.tgz#4dc473c2169ac3a1c9f4a51cfcd091d1c36fcff9" + integrity sha512-7LIrjYzndorDY88MycupkpQLKS1AFfsVRm2k/9PtKScSy5tZq0McZTj+DiMRynboZfIqOKvo03pmhTaUgiD6fQ== + dependencies: + "@babel/types" "^7.18.2" + +"@babel/helper-split-export-declaration@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz#0b648c0c42da9d3920d85ad585f2778620b8726b" + integrity sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-validator-identifier@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad" + integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw== + +"@babel/helper-validator-option@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23" + integrity sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ== + +"@babel/helpers@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.18.2.tgz#970d74f0deadc3f5a938bfa250738eb4ac889384" + integrity sha512-j+d+u5xT5utcQSzrh9p+PaJX94h++KN+ng9b9WEJq7pkUPAd61FGqhjuUEdfknb3E/uDBb7ruwEeKkIxNJPIrg== + dependencies: + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.2" + "@babel/types" "^7.18.2" + +"@babel/highlight@^7.16.7": + version "7.17.12" + resolved 
"https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.17.12.tgz#257de56ee5afbd20451ac0a75686b6b404257351" + integrity sha512-7yykMVF3hfZY2jsHZEEgLc+3x4o1O+fYyULu11GynEUQNwB6lua+IIQn1FiJxNucd5UlyJryrwsOh8PL9Sn8Qg== + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.16.7", "@babel/parser@^7.18.5": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.5.tgz#337062363436a893a2d22faa60be5bb37091c83c" + integrity sha512-YZWVaglMiplo7v8f1oMQ5ZPQr0vn7HPeZXxXWsxXJRjGVrzUFn9OxFQl1sb5wzfootjA/yChhW84BV+383FSOw== + +"@babel/plugin-syntax-jsx@^7.12.13": + version "7.17.12" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.17.12.tgz#834035b45061983a491f60096f61a2e7c5674a47" + integrity sha512-spyY3E3AURfxh/RHtjx5j6hs8am5NbUBGfcZ2vB3uShSpZdQyXSf5rR5Mk76vbtlAZOelyVQ71Fg0x9SG4fsog== + dependencies: + "@babel/helper-plugin-utils" "^7.17.12" + +"@babel/runtime@^7.0.0", "@babel/runtime@^7.13.10", "@babel/runtime@^7.17.2", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.3", "@babel/runtime@^7.8.7": + version "7.18.3" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.18.3.tgz#c7b654b57f6f63cf7f8b418ac9ca04408c4579f4" + integrity sha512-38Y8f7YUhce/K7RMwTp7m0uCumpv9hZkitCbBClqQIow1qSbCvGkcegKOXpEWCQLfWmevgRiWokZ1GkpfhbZug== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.7.tgz#8d126c8701fde4d66b264b3eba3d96f07666d155" + integrity sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/parser" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/traverse@^7.18.0", "@babel/traverse@^7.18.2", "@babel/traverse@^7.18.5": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.5.tgz#94a8195ad9642801837988ab77f36e992d9a20cd" + integrity sha512-aKXj1KT66sBj0vVzk6rEeAO6Z9aiiQ68wfDgge3nHhA/my6xMM/7HGQUNumKZaoa2qUPQ5whJG9aAifsxUKfLA== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.18.2" + "@babel/helper-environment-visitor" "^7.18.2" + "@babel/helper-function-name" "^7.17.9" + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/parser" "^7.18.5" + "@babel/types" "^7.18.4" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.16.7", "@babel/types@^7.17.0", "@babel/types@^7.18.0", "@babel/types@^7.18.2", "@babel/types@^7.18.4": + version "7.18.4" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.4.tgz#27eae9b9fd18e9dccc3f9d6ad051336f307be354" + integrity sha512-ThN1mBcMq5pG/Vm2IcBmPPfyPXbd8S02rS+OBIDENdufvqC7Z/jHPCv9IcP01277aKtDI8g/2XysBN4hA8niiw== + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + to-fast-properties "^2.0.0" + +"@date-io/core@^2.14.0": + version "2.14.0" + resolved "https://registry.yarnpkg.com/@date-io/core/-/core-2.14.0.tgz#03e9b9b9fc8e4d561c32dd324df0f3ccd967ef14" + integrity sha512-qFN64hiFjmlDHJhu+9xMkdfDG2jLsggNxKXglnekUpXSq8faiqZgtHm2lsHCUuaPDTV6wuXHcCl8J1GQ5wLmPw== + +"@date-io/date-fns@^2.11.0": + version "2.14.0" + resolved "https://registry.yarnpkg.com/@date-io/date-fns/-/date-fns-2.14.0.tgz#92ab150f488f294c135c873350d154803cebdbea" + integrity 
sha512-4fJctdVyOd5cKIKGaWUM+s3MUXMuzkZaHuTY15PH70kU1YTMrCoauA7hgQVx9qj0ZEbGrH9VSPYJYnYro7nKiA== + dependencies: + "@date-io/core" "^2.14.0" + +"@date-io/dayjs@^2.11.0": + version "2.14.0" + resolved "https://registry.yarnpkg.com/@date-io/dayjs/-/dayjs-2.14.0.tgz#8d4e93e1d473bb5f25210866204dc33384ca4c20" + integrity sha512-4fRvNWaOh7AjvOyJ4h6FYMS7VHLQnIEeAV5ahv6sKYWx+1g1UwYup8h7+gPuoF+sW2hTScxi7PVaba2Jk/U8Og== + dependencies: + "@date-io/core" "^2.14.0" + +"@date-io/luxon@^2.11.1": + version "2.14.0" + resolved "https://registry.yarnpkg.com/@date-io/luxon/-/luxon-2.14.0.tgz#cd1641229e00a899625895de3a31e3aaaf66629f" + integrity sha512-KmpBKkQFJ/YwZgVd0T3h+br/O0uL9ZdE7mn903VPAG2ZZncEmaUfUdYKFT7v7GyIKJ4KzCp379CRthEbxevEVg== + dependencies: + "@date-io/core" "^2.14.0" + +"@date-io/moment@^2.11.0": + version "2.14.0" + resolved "https://registry.yarnpkg.com/@date-io/moment/-/moment-2.14.0.tgz#8300abd6ae8c55d8edee90d118db3cef0b1d4f58" + integrity sha512-VsoLXs94GsZ49ecWuvFbsa081zEv2xxG7d+izJsqGa2L8RPZLlwk27ANh87+SNnOUpp+qy2AoCAf0mx4XXhioA== + dependencies: + "@date-io/core" "^2.14.0" + +"@emotion/babel-plugin@^11.7.1": + version "11.9.2" + resolved "https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.9.2.tgz#723b6d394c89fb2ef782229d92ba95a740576e95" + integrity sha512-Pr/7HGH6H6yKgnVFNEj2MVlreu3ADqftqjqwUvDy/OJzKFgxKeTQ+eeUf20FOTuHVkDON2iNa25rAXVYtWJCjw== + dependencies: + "@babel/helper-module-imports" "^7.12.13" + "@babel/plugin-syntax-jsx" "^7.12.13" + "@babel/runtime" "^7.13.10" + "@emotion/hash" "^0.8.0" + "@emotion/memoize" "^0.7.5" + "@emotion/serialize" "^1.0.2" + babel-plugin-macros "^2.6.1" + convert-source-map "^1.5.0" + escape-string-regexp "^4.0.0" + find-root "^1.1.0" + source-map "^0.5.7" + stylis "4.0.13" + +"@emotion/cache@^11.7.1", "@emotion/cache@^11.9.3": + version "11.9.3" + resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.9.3.tgz#96638449f6929fd18062cfe04d79b29b44c0d6cb" + integrity sha512-0dgkI/JKlCXa+lEXviaMtGBL0ynpx4osh7rjOXE71q9bIF8G+XhJgvi+wDu0B0IdCVx37BffiwXlN9I3UuzFvg== + dependencies: + "@emotion/memoize" "^0.7.4" + "@emotion/sheet" "^1.1.1" + "@emotion/utils" "^1.0.0" + "@emotion/weak-memoize" "^0.2.5" + stylis "4.0.13" + +"@emotion/hash@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413" + integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow== + +"@emotion/is-prop-valid@^1.1.2", "@emotion/is-prop-valid@^1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.1.3.tgz#f0907a416368cf8df9e410117068e20fe87c0a3a" + integrity sha512-RFg04p6C+1uO19uG8N+vqanzKqiM9eeV1LDOG3bmkYmuOj7NbKNlFC/4EZq5gnwAIlcC/jOT24f8Td0iax2SXA== + dependencies: + "@emotion/memoize" "^0.7.4" + +"@emotion/memoize@^0.7.4", "@emotion/memoize@^0.7.5": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.5.tgz#2c40f81449a4e554e9fc6396910ed4843ec2be50" + integrity sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ== + +"@emotion/react@^11.9.3": + version "11.9.3" + resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.9.3.tgz#f4f4f34444f6654a2e550f5dab4f2d360c101df9" + integrity sha512-g9Q1GcTOlzOEjqwuLF/Zd9LC+4FljjPjDfxSM7KmEakm+hsHXk+bYZ2q+/hTJzr0OUNkujo72pXLQvXj6H+GJQ== + dependencies: + "@babel/runtime" "^7.13.10" + "@emotion/babel-plugin" "^11.7.1" + "@emotion/cache" "^11.9.3" + 
"@emotion/serialize" "^1.0.4" + "@emotion/utils" "^1.1.0" + "@emotion/weak-memoize" "^0.2.5" + hoist-non-react-statics "^3.3.1" + +"@emotion/serialize@^1.0.2", "@emotion/serialize@^1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.0.4.tgz#ff31fd11bb07999611199c2229e152faadc21a3c" + integrity sha512-1JHamSpH8PIfFwAMryO2bNka+y8+KA5yga5Ocf2d7ZEiJjb7xlLW7aknBGZqJLajuLOvJ+72vN+IBSwPlXD1Pg== + dependencies: + "@emotion/hash" "^0.8.0" + "@emotion/memoize" "^0.7.4" + "@emotion/unitless" "^0.7.5" + "@emotion/utils" "^1.0.0" + csstype "^3.0.2" + +"@emotion/sheet@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.1.1.tgz#015756e2a9a3c7c5f11d8ec22966a8dbfbfac787" + integrity sha512-J3YPccVRMiTZxYAY0IOq3kd+hUP8idY8Kz6B/Cyo+JuXq52Ek+zbPbSQUrVQp95aJ+lsAW7DPL1P2Z+U1jGkKA== + +"@emotion/styled@^11.9.3": + version "11.9.3" + resolved "https://registry.yarnpkg.com/@emotion/styled/-/styled-11.9.3.tgz#47f0c71137fec7c57035bf3659b52fb536792340" + integrity sha512-o3sBNwbtoVz9v7WB1/Y/AmXl69YHmei2mrVnK7JgyBJ//Rst5yqPZCecEJlMlJrFeWHp+ki/54uN265V2pEcXA== + dependencies: + "@babel/runtime" "^7.13.10" + "@emotion/babel-plugin" "^11.7.1" + "@emotion/is-prop-valid" "^1.1.3" + "@emotion/serialize" "^1.0.4" + "@emotion/utils" "^1.1.0" + +"@emotion/unitless@^0.7.5": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed" + integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== + +"@emotion/utils@^1.0.0", "@emotion/utils@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.1.0.tgz#86b0b297f3f1a0f2bdb08eeac9a2f49afd40d0cf" + integrity sha512-iRLa/Y4Rs5H/f2nimczYmS5kFJEbpiVvgN3XVfZ022IYhuNA1IRSHEizcof88LtCTXtl9S2Cxt32KgaXEu72JQ== + +"@emotion/weak-memoize@^0.2.5": + version "0.2.5" + resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46" + integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA== + +"@ethersproject/abi@5.6.4", "@ethersproject/abi@^5.6.3": + version "5.6.4" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.6.4.tgz#f6e01b6ed391a505932698ecc0d9e7a99ee60362" + integrity sha512-TTeZUlCeIHG6527/2goZA6gW5F8Emoc7MrZDC7hhP84aRGvW3TEdTnZR08Ls88YXM1m2SuK42Osw/jSi3uO8gg== + dependencies: + "@ethersproject/address" "^5.6.1" + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/constants" "^5.6.1" + "@ethersproject/hash" "^5.6.1" + "@ethersproject/keccak256" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/strings" "^5.6.1" + +"@ethersproject/abstract-provider@5.6.1", "@ethersproject/abstract-provider@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.6.1.tgz#02ddce150785caf0c77fe036a0ebfcee61878c59" + integrity sha512-BxlIgogYJtp1FS8Muvj8YfdClk3unZH0vRMVX791Z9INBNT/kuACZ9GzaY1Y4yFq+YSy6/w4gzj3HCRKrK9hsQ== + dependencies: + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/networks" "^5.6.3" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/transactions" "^5.6.2" + "@ethersproject/web" "^5.6.1" + +"@ethersproject/abstract-signer@5.6.2", "@ethersproject/abstract-signer@^5.6.2": + version 
"5.6.2" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.6.2.tgz#491f07fc2cbd5da258f46ec539664713950b0b33" + integrity sha512-n1r6lttFBG0t2vNiI3HoWaS/KdOt8xyDjzlP2cuevlWLG6EX0OwcKLyG/Kp/cuwNxdy/ous+R/DEMdTUwWQIjQ== + dependencies: + "@ethersproject/abstract-provider" "^5.6.1" + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/properties" "^5.6.0" + +"@ethersproject/address@5.6.1", "@ethersproject/address@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.6.1.tgz#ab57818d9aefee919c5721d28cd31fd95eff413d" + integrity sha512-uOgF0kS5MJv9ZvCz7x6T2EXJSzotiybApn4XlOgoTX0xdtyVIJ7pF+6cGPxiEq/dpBiTfMiw7Yc81JcwhSYA0Q== + dependencies: + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/keccak256" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/rlp" "^5.6.1" + +"@ethersproject/base64@5.6.1", "@ethersproject/base64@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.6.1.tgz#2c40d8a0310c9d1606c2c37ae3092634b41d87cb" + integrity sha512-qB76rjop6a0RIYYMiB4Eh/8n+Hxu2NIZm8S/Q7kNo5pmZfXhHGHmS4MinUainiBC54SCyRnwzL+KZjj8zbsSsw== + dependencies: + "@ethersproject/bytes" "^5.6.1" + +"@ethersproject/basex@5.6.1", "@ethersproject/basex@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.6.1.tgz#badbb2f1d4a6f52ce41c9064f01eab19cc4c5305" + integrity sha512-a52MkVz4vuBXR06nvflPMotld1FJWSj2QT0985v7P/emPZO00PucFAkbcmq2vpVU7Ts7umKiSI6SppiLykVWsA== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/properties" "^5.6.0" + +"@ethersproject/bignumber@5.6.2", "@ethersproject/bignumber@^5.6.2": + version "5.6.2" + resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.6.2.tgz#72a0717d6163fab44c47bcc82e0c550ac0315d66" + integrity sha512-v7+EEUbhGqT3XJ9LMPsKvXYHFc8eHxTowFCG/HgJErmq4XHJ2WR7aeyICg3uTOAQ7Icn0GFHAohXEhxQHq4Ubw== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + bn.js "^5.2.1" + +"@ethersproject/bytes@5.6.1", "@ethersproject/bytes@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.6.1.tgz#24f916e411f82a8a60412344bf4a813b917eefe7" + integrity sha512-NwQt7cKn5+ZE4uDn+X5RAXLp46E1chXoaMmrxAyA0rblpxz8t58lVkrHXoRIn0lz1joQElQ8410GqhTqMOwc6g== + dependencies: + "@ethersproject/logger" "^5.6.0" + +"@ethersproject/constants@5.6.1", "@ethersproject/constants@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.6.1.tgz#e2e974cac160dd101cf79fdf879d7d18e8cb1370" + integrity sha512-QSq9WVnZbxXYFftrjSjZDUshp6/eKp6qrtdBtUCm0QxCV5z1fG/w3kdlcsjMCQuQHUnAclKoK7XpXMezhRDOLg== + dependencies: + "@ethersproject/bignumber" "^5.6.2" + +"@ethersproject/contracts@5.6.2": + version "5.6.2" + resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.6.2.tgz#20b52e69ebc1b74274ff8e3d4e508de971c287bc" + integrity sha512-hguUA57BIKi6WY0kHvZp6PwPlWF87MCeB4B7Z7AbUpTxfFXFdn/3b0GmjZPagIHS+3yhcBJDnuEfU4Xz+Ks/8g== + dependencies: + "@ethersproject/abi" "^5.6.3" + "@ethersproject/abstract-provider" "^5.6.1" + "@ethersproject/abstract-signer" "^5.6.2" + "@ethersproject/address" "^5.6.1" + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/constants" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + 
"@ethersproject/properties" "^5.6.0" + "@ethersproject/transactions" "^5.6.2" + +"@ethersproject/hash@5.6.1", "@ethersproject/hash@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.6.1.tgz#224572ea4de257f05b4abf8ae58b03a67e99b0f4" + integrity sha512-L1xAHurbaxG8VVul4ankNX5HgQ8PNCTrnVXEiFnE9xoRnaUcgfD12tZINtDinSllxPLCtGwguQxJ5E6keE84pA== + dependencies: + "@ethersproject/abstract-signer" "^5.6.2" + "@ethersproject/address" "^5.6.1" + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/keccak256" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/strings" "^5.6.1" + +"@ethersproject/hdnode@5.6.2", "@ethersproject/hdnode@^5.6.2": + version "5.6.2" + resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.6.2.tgz#26f3c83a3e8f1b7985c15d1db50dc2903418b2d2" + integrity sha512-tERxW8Ccf9CxW2db3WsN01Qao3wFeRsfYY9TCuhmG0xNpl2IO8wgXU3HtWIZ49gUWPggRy4Yg5axU0ACaEKf1Q== + dependencies: + "@ethersproject/abstract-signer" "^5.6.2" + "@ethersproject/basex" "^5.6.1" + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/pbkdf2" "^5.6.1" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/sha2" "^5.6.1" + "@ethersproject/signing-key" "^5.6.2" + "@ethersproject/strings" "^5.6.1" + "@ethersproject/transactions" "^5.6.2" + "@ethersproject/wordlists" "^5.6.1" + +"@ethersproject/json-wallets@5.6.1", "@ethersproject/json-wallets@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.6.1.tgz#3f06ba555c9c0d7da46756a12ac53483fe18dd91" + integrity sha512-KfyJ6Zwz3kGeX25nLihPwZYlDqamO6pfGKNnVMWWfEVVp42lTfCZVXXy5Ie8IZTN0HKwAngpIPi7gk4IJzgmqQ== + dependencies: + "@ethersproject/abstract-signer" "^5.6.2" + "@ethersproject/address" "^5.6.1" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/hdnode" "^5.6.2" + "@ethersproject/keccak256" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/pbkdf2" "^5.6.1" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/random" "^5.6.1" + "@ethersproject/strings" "^5.6.1" + "@ethersproject/transactions" "^5.6.2" + aes-js "3.0.0" + scrypt-js "3.0.1" + +"@ethersproject/keccak256@5.6.1", "@ethersproject/keccak256@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.6.1.tgz#b867167c9b50ba1b1a92bccdd4f2d6bd168a91cc" + integrity sha512-bB7DQHCTRDooZZdL3lk9wpL0+XuG3XLGHLh3cePnybsO3V0rdCAOQGpn/0R3aODmnTOOkCATJiD2hnL+5bwthA== + dependencies: + "@ethersproject/bytes" "^5.6.1" + js-sha3 "0.8.0" + +"@ethersproject/logger@5.6.0", "@ethersproject/logger@^5.6.0": + version "5.6.0" + resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.6.0.tgz#d7db1bfcc22fd2e4ab574cba0bb6ad779a9a3e7a" + integrity sha512-BiBWllUROH9w+P21RzoxJKzqoqpkyM1pRnEKG69bulE9TSQD8SAIvTQqIMZmmCO8pUNkgLP1wndX1gKghSpBmg== + +"@ethersproject/networks@5.6.4", "@ethersproject/networks@^5.6.3": + version "5.6.4" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.6.4.tgz#51296d8fec59e9627554f5a8a9c7791248c8dc07" + integrity sha512-KShHeHPahHI2UlWdtDMn2lJETcbtaJge4k7XSjDR9h79QTd6yQJmv6Cp2ZA4JdqWnhszAOLSuJEd9C0PRw7hSQ== + dependencies: + "@ethersproject/logger" "^5.6.0" + +"@ethersproject/pbkdf2@5.6.1", "@ethersproject/pbkdf2@^5.6.1": + version "5.6.1" + resolved 
"https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.6.1.tgz#f462fe320b22c0d6b1d72a9920a3963b09eb82d1" + integrity sha512-k4gRQ+D93zDRPNUfmduNKq065uadC2YjMP/CqwwX5qG6R05f47boq6pLZtV/RnC4NZAYOPH1Cyo54q0c9sshRQ== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/sha2" "^5.6.1" + +"@ethersproject/properties@5.6.0", "@ethersproject/properties@^5.6.0": + version "5.6.0" + resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.6.0.tgz#38904651713bc6bdd5bdd1b0a4287ecda920fa04" + integrity sha512-szoOkHskajKePTJSZ46uHUWWkbv7TzP2ypdEK6jGMqJaEt2sb0jCgfBo0gH0m2HBpRixMuJ6TBRaQCF7a9DoCg== + dependencies: + "@ethersproject/logger" "^5.6.0" + +"@ethersproject/providers@5.6.8": + version "5.6.8" + resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.6.8.tgz#22e6c57be215ba5545d3a46cf759d265bb4e879d" + integrity sha512-Wf+CseT/iOJjrGtAOf3ck9zS7AgPmr2fZ3N97r4+YXN3mBePTG2/bJ8DApl9mVwYL+RpYbNxMEkEp4mPGdwG/w== + dependencies: + "@ethersproject/abstract-provider" "^5.6.1" + "@ethersproject/abstract-signer" "^5.6.2" + "@ethersproject/address" "^5.6.1" + "@ethersproject/base64" "^5.6.1" + "@ethersproject/basex" "^5.6.1" + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/constants" "^5.6.1" + "@ethersproject/hash" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/networks" "^5.6.3" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/random" "^5.6.1" + "@ethersproject/rlp" "^5.6.1" + "@ethersproject/sha2" "^5.6.1" + "@ethersproject/strings" "^5.6.1" + "@ethersproject/transactions" "^5.6.2" + "@ethersproject/web" "^5.6.1" + bech32 "1.1.4" + ws "7.4.6" + +"@ethersproject/random@5.6.1", "@ethersproject/random@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.6.1.tgz#66915943981bcd3e11bbd43733f5c3ba5a790255" + integrity sha512-/wtPNHwbmng+5yi3fkipA8YBT59DdkGRoC2vWk09Dci/q5DlgnMkhIycjHlavrvrjJBkFjO/ueLyT+aUDfc4lA== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + +"@ethersproject/rlp@5.6.1", "@ethersproject/rlp@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.6.1.tgz#df8311e6f9f24dcb03d59a2bac457a28a4fe2bd8" + integrity sha512-uYjmcZx+DKlFUk7a5/W9aQVaoEC7+1MOBgNtvNg13+RnuUwT4F0zTovC0tmay5SmRslb29V1B7Y5KCri46WhuQ== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + +"@ethersproject/sha2@5.6.1", "@ethersproject/sha2@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.6.1.tgz#211f14d3f5da5301c8972a8827770b6fd3e51656" + integrity sha512-5K2GyqcW7G4Yo3uenHegbXRPDgARpWUiXc6RiF7b6i/HXUoWlb7uCARh7BAHg7/qT/Q5ydofNwiZcim9qpjB6g== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + hash.js "1.1.7" + +"@ethersproject/signing-key@5.6.2", "@ethersproject/signing-key@^5.6.2": + version "5.6.2" + resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.6.2.tgz#8a51b111e4d62e5a62aee1da1e088d12de0614a3" + integrity sha512-jVbu0RuP7EFpw82vHcL+GP35+KaNruVAZM90GxgQnGqB6crhBqW/ozBfFvdeImtmb4qPko0uxXjn8l9jpn0cwQ== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/properties" "^5.6.0" + bn.js "^5.2.1" + elliptic "6.5.4" + hash.js "1.1.7" + +"@ethersproject/solidity@5.6.1": + version "5.6.1" + resolved 
"https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.6.1.tgz#5845e71182c66d32e6ec5eefd041fca091a473e2" + integrity sha512-KWqVLkUUoLBfL1iwdzUVlkNqAUIFMpbbeH0rgCfKmJp0vFtY4AsaN91gHKo9ZZLkC4UOm3cI3BmMV4N53BOq4g== + dependencies: + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/keccak256" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/sha2" "^5.6.1" + "@ethersproject/strings" "^5.6.1" + +"@ethersproject/strings@5.6.1", "@ethersproject/strings@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.6.1.tgz#dbc1b7f901db822b5cafd4ebf01ca93c373f8952" + integrity sha512-2X1Lgk6Jyfg26MUnsHiT456U9ijxKUybz8IM1Vih+NJxYtXhmvKBcHOmvGqpFSVJ0nQ4ZCoIViR8XlRw1v/+Cw== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/constants" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + +"@ethersproject/transactions@5.6.2", "@ethersproject/transactions@^5.6.2": + version "5.6.2" + resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.6.2.tgz#793a774c01ced9fe7073985bb95a4b4e57a6370b" + integrity sha512-BuV63IRPHmJvthNkkt9G70Ullx6AcM+SDc+a8Aw/8Yew6YwT51TcBKEp1P4oOQ/bP25I18JJr7rcFRgFtU9B2Q== + dependencies: + "@ethersproject/address" "^5.6.1" + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/constants" "^5.6.1" + "@ethersproject/keccak256" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/rlp" "^5.6.1" + "@ethersproject/signing-key" "^5.6.2" + +"@ethersproject/units@5.6.1", "@ethersproject/units@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.6.1.tgz#ecc590d16d37c8f9ef4e89e2005bda7ddc6a4e6f" + integrity sha512-rEfSEvMQ7obcx3KWD5EWWx77gqv54K6BKiZzKxkQJqtpriVsICrktIQmKl8ReNToPeIYPnFHpXvKpi068YFZXw== + dependencies: + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/constants" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + +"@ethersproject/wallet@5.6.2": + version "5.6.2" + resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.6.2.tgz#cd61429d1e934681e413f4bc847a5f2f87e3a03c" + integrity sha512-lrgh0FDQPuOnHcF80Q3gHYsSUODp6aJLAdDmDV0xKCN/T7D99ta1jGVhulg3PY8wiXEngD0DfM0I2XKXlrqJfg== + dependencies: + "@ethersproject/abstract-provider" "^5.6.1" + "@ethersproject/abstract-signer" "^5.6.2" + "@ethersproject/address" "^5.6.1" + "@ethersproject/bignumber" "^5.6.2" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/hash" "^5.6.1" + "@ethersproject/hdnode" "^5.6.2" + "@ethersproject/json-wallets" "^5.6.1" + "@ethersproject/keccak256" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/random" "^5.6.1" + "@ethersproject/signing-key" "^5.6.2" + "@ethersproject/transactions" "^5.6.2" + "@ethersproject/wordlists" "^5.6.1" + +"@ethersproject/web@5.6.1", "@ethersproject/web@^5.6.1": + version "5.6.1" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.6.1.tgz#6e2bd3ebadd033e6fe57d072db2b69ad2c9bdf5d" + integrity sha512-/vSyzaQlNXkO1WV+RneYKqCJwualcUdx/Z3gseVovZP0wIlOFcCE1hkRhKBH8ImKbGQbMl9EAAyJFrJu7V0aqA== + dependencies: + "@ethersproject/base64" "^5.6.1" + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/strings" "^5.6.1" + +"@ethersproject/wordlists@5.6.1", "@ethersproject/wordlists@^5.6.1": + version "5.6.1" + resolved 
"https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.6.1.tgz#1e78e2740a8a21e9e99947e47979d72e130aeda1" + integrity sha512-wiPRgBpNbNwCQFoCr8bcWO8o5I810cqO6mkdtKfLKFlLxeCWcnzDi4Alu8iyNzlhYuS9npCwivMbRWF19dyblw== + dependencies: + "@ethersproject/bytes" "^5.6.1" + "@ethersproject/hash" "^5.6.1" + "@ethersproject/logger" "^5.6.0" + "@ethersproject/properties" "^5.6.0" + "@ethersproject/strings" "^5.6.1" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz#cf92a983c83466b8c0ce9124fadeaf09f7c66ea9" + integrity sha512-GcHwniMlA2z+WFPWuY8lp3fsza0I8xPFMWL5+n8LYyP6PSvPrXf4+n8stDHZY2DM0zy9sVkRDy1jDI4XGzYVqg== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.0.7" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz#30cd49820a962aff48c8fffc5cd760151fca61fe" + integrity sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA== + +"@jridgewell/set-array@^1.0.0": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" + integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.13" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz#b6461fb0c2964356c469e115f504c95ad97ab88c" + integrity sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w== + +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.13" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz#dcfe3e95f224c8fe97a87a5235defec999aa92ea" + integrity sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@metamask/detect-provider@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@metamask/detect-provider/-/detect-provider-1.2.0.tgz#3667a7531f2a682e3c3a43eaf3a1958bdb42a696" + integrity sha512-ocA76vt+8D0thgXZ7LxFPyqw3H7988qblgzddTDA6B8a/yU0uKV42QR/DhA+Jh11rJjxW0jKvwb5htA6krNZDQ== + +"@mui/base@5.0.0-alpha.85": + version "5.0.0-alpha.85" + resolved "https://registry.yarnpkg.com/@mui/base/-/base-5.0.0-alpha.85.tgz#e9e19678bf72dae228d0f25d33dfe20462aac833" + integrity sha512-ONlQJOmQrxmR+pYF9AqH69FOG4ofwzVzNltwb2xKAQIW3VbsNZahcHIpzhFd70W6EIU+QHzB9TzamSM+Fg/U7w== + dependencies: + "@babel/runtime" "^7.17.2" + "@emotion/is-prop-valid" "^1.1.2" + "@mui/types" "^7.1.4" + "@mui/utils" "^5.8.4" + "@popperjs/core" "^2.11.5" + clsx "^1.1.1" + prop-types "^15.8.1" + react-is "^17.0.2" + +"@mui/icons-material@^5.8.3": + version "5.8.4" + resolved "https://registry.yarnpkg.com/@mui/icons-material/-/icons-material-5.8.4.tgz#3f2907c9f8f5ce4d754cb8fb4b68b5a1abf4d095" + integrity 
sha512-9Z/vyj2szvEhGWDvb+gG875bOGm8b8rlHBKOD1+nA3PcgC3fV6W1AU6pfOorPeBfH2X4mb9Boe97vHvaSndQvA== + dependencies: + "@babel/runtime" "^7.17.2" + +"@mui/lab@^5.0.0-alpha.85": + version "5.0.0-alpha.86" + resolved "https://registry.yarnpkg.com/@mui/lab/-/lab-5.0.0-alpha.86.tgz#83323e0ff17fdea641fa1d93be024413bf407ec3" + integrity sha512-5dx9/vHldiE5KFu99YUtEGKyUgwTiq8wM+IhEnNKkU+YjEMULVYV+mgS9nvnf6laKtgqy2hOE4JivqRPIuOGdA== + dependencies: + "@babel/runtime" "^7.17.2" + "@mui/base" "5.0.0-alpha.85" + "@mui/system" "^5.8.4" + "@mui/utils" "^5.8.4" + "@mui/x-date-pickers" "5.0.0-alpha.1" + clsx "^1.1.1" + prop-types "^15.8.1" + react-is "^17.0.2" + react-transition-group "^4.4.2" + rifm "^0.12.1" + +"@mui/material@^5.8.3": + version "5.8.4" + resolved "https://registry.yarnpkg.com/@mui/material/-/material-5.8.4.tgz#b9cdae0c79ea770bc9cc3aafb7f750ed8ebe1b5d" + integrity sha512-KlOJS1JGhwuhdoF4fulmz41h/YxyMdZSc+ncz+HAah0GKn8ovAs5774f1w0lIasxbtI1Ziunwvmnu9PvvUKdMw== + dependencies: + "@babel/runtime" "^7.17.2" + "@mui/base" "5.0.0-alpha.85" + "@mui/system" "^5.8.4" + "@mui/types" "^7.1.4" + "@mui/utils" "^5.8.4" + "@types/react-transition-group" "^4.4.4" + clsx "^1.1.1" + csstype "^3.1.0" + prop-types "^15.8.1" + react-is "^17.0.2" + react-transition-group "^4.4.2" + +"@mui/private-theming@^5.8.4": + version "5.8.4" + resolved "https://registry.yarnpkg.com/@mui/private-theming/-/private-theming-5.8.4.tgz#8ff896601cf84eb9f8394db7674ee4dd2a3343f7" + integrity sha512-3Lp0VAEjtQygJ70MWEyHkKvg327O6YoBH6ZNEy6fIsrK6gmRIj+YrlvJ7LQCbowY+qDGnbdMrTBd1hfThlI8lg== + dependencies: + "@babel/runtime" "^7.17.2" + "@mui/utils" "^5.8.4" + prop-types "^15.8.1" + +"@mui/styled-engine@^5.8.0": + version "5.8.0" + resolved "https://registry.yarnpkg.com/@mui/styled-engine/-/styled-engine-5.8.0.tgz#89ed42efe7c8749e5a60af035bc5d3a6bea362bf" + integrity sha512-Q3spibB8/EgeMYHc+/o3RRTnAYkSl7ROCLhXJ830W8HZ2/iDiyYp16UcxKPurkXvLhUaILyofPVrP3Su2uKsAw== + dependencies: + "@babel/runtime" "^7.17.2" + "@emotion/cache" "^11.7.1" + prop-types "^15.8.1" + +"@mui/styles@^5.8.3": + version "5.8.4" + resolved "https://registry.yarnpkg.com/@mui/styles/-/styles-5.8.4.tgz#cc6463df91ad1cc1c035229526f865093bbfc03e" + integrity sha512-Td7dafJDgpdzObT0z5CH/ihOh22MG2vZ7p2tpnrKaq3We50f8l3T69XeTNcy2OH0TWnXJJuASZS/0uMJmVPfag== + dependencies: + "@babel/runtime" "^7.17.2" + "@emotion/hash" "^0.8.0" + "@mui/private-theming" "^5.8.4" + "@mui/types" "^7.1.4" + "@mui/utils" "^5.8.4" + clsx "^1.1.1" + csstype "^3.1.0" + hoist-non-react-statics "^3.3.2" + jss "^10.8.2" + jss-plugin-camel-case "^10.8.2" + jss-plugin-default-unit "^10.8.2" + jss-plugin-global "^10.8.2" + jss-plugin-nested "^10.8.2" + jss-plugin-props-sort "^10.8.2" + jss-plugin-rule-value-function "^10.8.2" + jss-plugin-vendor-prefixer "^10.8.2" + prop-types "^15.8.1" + +"@mui/system@^5.8.4": + version "5.8.4" + resolved "https://registry.yarnpkg.com/@mui/system/-/system-5.8.4.tgz#88306aefcc3a60528f69dcd2d66516831859c328" + integrity sha512-eeYZXlOn4p+tYwqqDlci6wW4knJ68aGx5A24YU9ubYZ5o0IwveoNP3LC9sHAMxigk/mUTqL4bpSMJ2HbTn2aQg== + dependencies: + "@babel/runtime" "^7.17.2" + "@mui/private-theming" "^5.8.4" + "@mui/styled-engine" "^5.8.0" + "@mui/types" "^7.1.4" + "@mui/utils" "^5.8.4" + clsx "^1.1.1" + csstype "^3.1.0" + prop-types "^15.8.1" + +"@mui/types@^7.1.4": + version "7.1.4" + resolved "https://registry.yarnpkg.com/@mui/types/-/types-7.1.4.tgz#4185c05d6df63ec673cda15feab80440abadc764" + integrity 
sha512-uveM3byMbthO+6tXZ1n2zm0W3uJCQYtwt/v5zV5I77v2v18u0ITkb8xwhsDD2i3V2Kye7SaNR6FFJ6lMuY/WqQ== + +"@mui/utils@^5.4.1", "@mui/utils@^5.6.0", "@mui/utils@^5.8.4": + version "5.8.4" + resolved "https://registry.yarnpkg.com/@mui/utils/-/utils-5.8.4.tgz#5c46b5900bd2452b3ce54a7a1c94a3e2a8a75c34" + integrity sha512-BHYErfrjqqh76KaDAm8wZlhEip1Uj7Cmco65NcsF3BWrAl3FWngACpaPZeEbTgmaEwyWAQEE6LZhsmy43hfyqQ== + dependencies: + "@babel/runtime" "^7.17.2" + "@types/prop-types" "^15.7.5" + "@types/react-is" "^16.7.1 || ^17.0.0" + prop-types "^15.8.1" + react-is "^17.0.2" + +"@mui/x-data-grid@^5.12.1": + version "5.12.2" + resolved "https://registry.yarnpkg.com/@mui/x-data-grid/-/x-data-grid-5.12.2.tgz#e7bde75549ab592ebdafe2d12a2b7f671d484d22" + integrity sha512-OA5jjSoGPrO742GWNSxUPac6U1m8wF0rzcmqlj5vMuBySkPi0ycPRRlVAlYJWTVhSBPs+UWoHA9QpTE19eMBYg== + dependencies: + "@babel/runtime" "^7.17.2" + "@mui/utils" "^5.4.1" + clsx "^1.1.1" + prop-types "^15.8.1" + reselect "^4.1.5" + +"@mui/x-date-pickers@5.0.0-alpha.1": + version "5.0.0-alpha.1" + resolved "https://registry.yarnpkg.com/@mui/x-date-pickers/-/x-date-pickers-5.0.0-alpha.1.tgz#7450b5544b9ed655db41891c74e2c5f652fbedb7" + integrity sha512-dLPkRiIn2Gr0momblxiOnIwrxn4SijVix+8e08mwAGWhiWcmWep1O9XTRDpZsjB0kjHYCf+kZjlRX4dxnj2acg== + dependencies: + "@date-io/date-fns" "^2.11.0" + "@date-io/dayjs" "^2.11.0" + "@date-io/luxon" "^2.11.1" + "@date-io/moment" "^2.11.0" + "@mui/utils" "^5.6.0" + clsx "^1.1.1" + prop-types "^15.7.2" + react-transition-group "^4.4.2" + rifm "^0.12.1" + +"@next/env@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/env/-/env-12.1.6.tgz#5f44823a78335355f00f1687cfc4f1dafa3eca08" + integrity sha512-Te/OBDXFSodPU6jlXYPAXpmZr/AkG6DCATAxttQxqOWaq6eDFX25Db3dK0120GZrSZmv4QCe9KsZmJKDbWs4OA== + +"@next/swc-android-arm-eabi@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-12.1.6.tgz#79a35349b98f2f8c038ab6261aa9cd0d121c03f9" + integrity sha512-BxBr3QAAAXWgk/K7EedvzxJr2dE014mghBSA9iOEAv0bMgF+MRq4PoASjuHi15M2zfowpcRG8XQhMFtxftCleQ== + +"@next/swc-android-arm64@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-android-arm64/-/swc-android-arm64-12.1.6.tgz#ec08ea61794f8752c8ebcacbed0aafc5b9407456" + integrity sha512-EboEk3ROYY7U6WA2RrMt/cXXMokUTXXfnxe2+CU+DOahvbrO8QSWhlBl9I9ZbFzJx28AGB9Yo3oQHCvph/4Lew== + +"@next/swc-darwin-arm64@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.1.6.tgz#d1053805615fd0706e9b1667893a72271cd87119" + integrity sha512-P0EXU12BMSdNj1F7vdkP/VrYDuCNwBExtRPDYawgSUakzi6qP0iKJpya2BuLvNzXx+XPU49GFuDC5X+SvY0mOw== + +"@next/swc-darwin-x64@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-12.1.6.tgz#2d1b926a22f4c5230d5b311f9c56cfdcc406afec" + integrity sha512-9FptMnbgHJK3dRDzfTpexs9S2hGpzOQxSQbe8omz6Pcl7rnEp9x4uSEKY51ho85JCjL4d0tDLBcXEJZKKLzxNg== + +"@next/swc-linux-arm-gnueabihf@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.1.6.tgz#c021918d2a94a17f823106a5e069335b8a19724f" + integrity sha512-PvfEa1RR55dsik/IDkCKSFkk6ODNGJqPY3ysVUZqmnWMDSuqFtf7BPWHFa/53znpvVB5XaJ5Z1/6aR5CTIqxPw== + +"@next/swc-linux-arm64-gnu@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.1.6.tgz#ac55c07bfabde378dfa0ce2b8fc1c3b2897e81ae" + integrity 
sha512-53QOvX1jBbC2ctnmWHyRhMajGq7QZfl974WYlwclXarVV418X7ed7o/EzGY+YVAEKzIVaAB9JFFWGXn8WWo0gQ== + +"@next/swc-linux-arm64-musl@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.1.6.tgz#e429f826279894be9096be6bec13e75e3d6bd671" + integrity sha512-CMWAkYqfGdQCS+uuMA1A2UhOfcUYeoqnTW7msLr2RyYAys15pD960hlDfq7QAi8BCAKk0sQ2rjsl0iqMyziohQ== + +"@next/swc-linux-x64-gnu@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.1.6.tgz#1f276c0784a5ca599bfa34b2fcc0b38f3a738e08" + integrity sha512-AC7jE4Fxpn0s3ujngClIDTiEM/CQiB2N2vkcyWWn6734AmGT03Duq6RYtPMymFobDdAtZGFZd5nR95WjPzbZAQ== + +"@next/swc-linux-x64-musl@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.1.6.tgz#1d9933dd6ba303dcfd8a2acd6ac7c27ed41e2eea" + integrity sha512-c9Vjmi0EVk0Kou2qbrynskVarnFwfYIi+wKufR9Ad7/IKKuP6aEhOdZiIIdKsYWRtK2IWRF3h3YmdnEa2WLUag== + +"@next/swc-win32-arm64-msvc@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.1.6.tgz#2ef9837f12ca652b1783d72ecb86208906042f02" + integrity sha512-3UTOL/5XZSKFelM7qN0it35o3Cegm6LsyuERR3/OoqEExyj3aCk7F025b54/707HTMAnjlvQK3DzLhPu/xxO4g== + +"@next/swc-win32-ia32-msvc@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.1.6.tgz#74003d0aa1c59dfa56cb15481a5c607cbc0027b9" + integrity sha512-8ZWoj6nCq6fI1yCzKq6oK0jE6Mxlz4MrEsRyu0TwDztWQWe7rh4XXGLAa2YVPatYcHhMcUL+fQQbqd1MsgaSDA== + +"@next/swc-win32-x64-msvc@12.1.6": + version "12.1.6" + resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.1.6.tgz#a350caf42975e7197b24b495b8d764eec7e6a36e" + integrity sha512-4ZEwiRuZEicXhXqmhw3+de8Z4EpOLQj/gp+D9fFWo6ii6W1kBkNNvvEx4A90ugppu+74pT1lIJnOuz3A9oQeJA== + +"@nivo/annotations@0.79.1": + version "0.79.1" + resolved "https://registry.yarnpkg.com/@nivo/annotations/-/annotations-0.79.1.tgz#c1b93a1facf55e3f32e2af1b8fb0ba1bebc01910" + integrity sha512-lYso9Luu0maSDtIufwvyVt2+Wue7R9Fh3CIjuRDmNR72UjAgAVEcCar27Fy865UXGsj2hRJZ7KY/1s6kT3gu/w== + dependencies: + "@nivo/colors" "0.79.1" + "@react-spring/web" "9.3.1" + lodash "^4.17.21" + +"@nivo/arcs@0.79.1": + version "0.79.1" + resolved "https://registry.yarnpkg.com/@nivo/arcs/-/arcs-0.79.1.tgz#768d5e91356e94199377fbd0ca762bc364353414" + integrity sha512-owScoElMv5EwDbZKJhns282MnXVM4rq9jYwBnFBx872Igi2r6HwKk1m4jDWGfDktJ7MyECvuVzxRaUImWQdufA== + dependencies: + "@nivo/colors" "0.79.1" + "@react-spring/web" "9.3.1" + d3-shape "^1.3.5" + +"@nivo/axes@0.79.0": + version "0.79.0" + resolved "https://registry.yarnpkg.com/@nivo/axes/-/axes-0.79.0.tgz#6f009819b26f93a4126697152aeab5f979f1ab6c" + integrity sha512-EhSeCPxtWEuxqnifeyF/pIJEzL7pRM3rfygL+MpfT5ypu5NcXYRGQo/Bw0Vh+GF1ML+tNAE0rRvCu2jgLSdVNQ== + dependencies: + "@nivo/scales" "0.79.0" + "@react-spring/web" "9.3.1" + d3-format "^1.4.4" + d3-time-format "^3.0.0" + +"@nivo/bar@^0.79.1": + version "0.79.1" + resolved "https://registry.yarnpkg.com/@nivo/bar/-/bar-0.79.1.tgz#42d28169307e735cb84e57b4b6915195ef1c97fb" + integrity sha512-swJ2FtFeRPWJK9O6aZiqTDi2J6GrU2Z6kIHBBCXBlFmq6+vfd5AqOHytdXPTaN80JsKDBBdtY7tqRjpRPlDZwQ== + dependencies: + "@nivo/annotations" "0.79.1" + "@nivo/axes" "0.79.0" + "@nivo/colors" "0.79.1" + "@nivo/legends" "0.79.1" + "@nivo/scales" "0.79.0" + "@nivo/tooltip" "0.79.0" + "@react-spring/web" "9.3.1" + d3-scale "^3.2.3" + d3-shape 
"^1.2.2" + lodash "^4.17.21" + +"@nivo/colors@0.79.1": + version "0.79.1" + resolved "https://registry.yarnpkg.com/@nivo/colors/-/colors-0.79.1.tgz#0504c08b6a598bc5cb5a8b823d332a73fdc6ef43" + integrity sha512-45huBmz46OoQtfqzHrnqDJ9msebOBX84fTijyOBi8mn8iTDOK2xWgzT7cCYP3hKE58IclkibkzVyWCeJ+rUlqg== + dependencies: + d3-color "^2.0.0" + d3-scale "^3.2.3" + d3-scale-chromatic "^2.0.0" + lodash "^4.17.21" + +"@nivo/core@^0.79.0": + version "0.79.0" + resolved "https://registry.yarnpkg.com/@nivo/core/-/core-0.79.0.tgz#5755212c2058c20899990e7c8ec0e918ac00e5f5" + integrity sha512-e1iGodmGuXkF+QWAjhHVFc+lUnfBoUwaWqVcBXBfebzNc50tTJrTTMHyQczjgOIfTc8gEu23lAY4mVZCDKscig== + dependencies: + "@nivo/recompose" "0.79.0" + "@react-spring/web" "9.3.1" + d3-color "^2.0.0" + d3-format "^1.4.4" + d3-interpolate "^2.0.1" + d3-scale "^3.2.3" + d3-scale-chromatic "^2.0.0" + d3-shape "^1.3.5" + d3-time-format "^3.0.0" + lodash "^4.17.21" + +"@nivo/legends@0.79.1": + version "0.79.1" + resolved "https://registry.yarnpkg.com/@nivo/legends/-/legends-0.79.1.tgz#60b1806bba547f796e6e5b66943d65153de60c79" + integrity sha512-AoabiLherOAk3/HR/N791fONxNdwNk/gCTJC/6BKUo2nX+JngEYm3nVFmTC1R6RdjwJTeCb9Vtuc4MHA+mcgig== + +"@nivo/pie@^0.79.1": + version "0.79.1" + resolved "https://registry.yarnpkg.com/@nivo/pie/-/pie-0.79.1.tgz#4461e5273adabd0ef52bfcb54fbf6604f676d5a5" + integrity sha512-Cm8I6/nrmcpJLwziUhZ3TtwRV6K/7qWJ6alN6bUh8z7w2nScSnD/PhmAPS89p3jzSUEBPOvCViKwdvyThJ8KCg== + dependencies: + "@nivo/arcs" "0.79.1" + "@nivo/colors" "0.79.1" + "@nivo/legends" "0.79.1" + "@nivo/tooltip" "0.79.0" + d3-shape "^1.3.5" + +"@nivo/recompose@0.79.0": + version "0.79.0" + resolved "https://registry.yarnpkg.com/@nivo/recompose/-/recompose-0.79.0.tgz#c0c54ecabb2300ce672f3c3199f74629df33cc08" + integrity sha512-2GFnOHfA2jzTOA5mdKMwJ6myCRGoXQQbQvFFQ7B/+hnHfU/yrOVpiGt6TPAn3qReC4dyDYrzy1hr9UeQh677ig== + dependencies: + react-lifecycles-compat "^3.0.4" + +"@nivo/scales@0.79.0": + version "0.79.0" + resolved "https://registry.yarnpkg.com/@nivo/scales/-/scales-0.79.0.tgz#553b6910288080fbfbbe4d2aab1dd80e2d172e6e" + integrity sha512-5fAt5Wejp8yzAk6qmA3KU+celCxNYrrBhfvOi2ECDG8KQi+orbDnrO6qjVF6+ebfOn9az8ZVukcSeGA5HceiMg== + dependencies: + d3-scale "^3.2.3" + d3-time "^1.0.11" + d3-time-format "^3.0.0" + lodash "^4.17.21" + +"@nivo/tooltip@0.79.0": + version "0.79.0" + resolved "https://registry.yarnpkg.com/@nivo/tooltip/-/tooltip-0.79.0.tgz#3d46be8734e5d30e5387515db0c83bd1c795f442" + integrity sha512-hsJsvhDVR9P/QqIEDIttaA6aslR3tU9So1s/k2jMdppL7J9ZH/IrVx9TbIP7jDKmnU5AMIP5uSstXj9JiKLhQA== + dependencies: + "@react-spring/web" "9.3.1" + +"@popperjs/core@^2.11.5": + version "2.11.5" + resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.11.5.tgz#db5a11bf66bdab39569719555b0f76e138d7bd64" + integrity sha512-9X2obfABZuDVLCgPK9aX0a/x4jaOEweTTWE2+9sr0Qqqevj2Uv5XorvusThmc9XGYpS9yI+fhh8RTafBtGposw== + +"@react-spring/animated@~9.3.0": + version "9.3.2" + resolved "https://registry.yarnpkg.com/@react-spring/animated/-/animated-9.3.2.tgz#bda85e92e9e9b6861c259f2dacb54270a37b0f39" + integrity sha512-pBvKydRHbTzuyaeHtxGIOvnskZxGo/S5/YK1rtYm88b9NQZuZa95Rgd3O0muFL+99nvBMBL8cvQGD0UJmsqQsg== + dependencies: + "@react-spring/shared" "~9.3.0" + "@react-spring/types" "~9.3.0" + +"@react-spring/core@~9.3.0": + version "9.3.2" + resolved "https://registry.yarnpkg.com/@react-spring/core/-/core-9.3.2.tgz#d1dc5810666ac18550db89c58567f28fbe04fb07" + integrity sha512-kMRjkgdQ6LJ0lmb/wQlONpghaMT83UxglXHJC6m9kZS/GKVmN//TYMEK85xN1rC5Gg+BmjG61DtLCSkkLDTfNw== + dependencies: 
+ "@react-spring/animated" "~9.3.0" + "@react-spring/shared" "~9.3.0" + "@react-spring/types" "~9.3.0" + +"@react-spring/rafz@~9.3.0": + version "9.3.2" + resolved "https://registry.yarnpkg.com/@react-spring/rafz/-/rafz-9.3.2.tgz#0cbd296cd17bbf1e7e49d3b3616884e026d5fb67" + integrity sha512-YtqNnAYp5bl6NdnDOD5TcYS40VJmB+Civ4LPtcWuRPKDAOa/XAf3nep48r0wPTmkK936mpX8aIm7h+luW59u5A== + +"@react-spring/shared@~9.3.0": + version "9.3.2" + resolved "https://registry.yarnpkg.com/@react-spring/shared/-/shared-9.3.2.tgz#967ce1d8a16d820a99e6eeb2a8f7ca9311d9dfa0" + integrity sha512-ypGQQ8w7mWnrELLon4h6mBCBxdd8j1pgLzmHXLpTC/f4ya2wdP+0WIKBWXJymIf+5NiTsXgSJra5SnHP5FBY+A== + dependencies: + "@react-spring/rafz" "~9.3.0" + "@react-spring/types" "~9.3.0" + +"@react-spring/types@~9.3.0": + version "9.3.2" + resolved "https://registry.yarnpkg.com/@react-spring/types/-/types-9.3.2.tgz#0277d436e50d7a824897dd7bb880f4842fbcd0fe" + integrity sha512-u+IK9z9Re4hjNkBYKebZr7xVDYTai2RNBsI4UPL/k0B6lCNSwuqWIXfKZUDVlMOeZHtDqayJn4xz6HcSkTj3FQ== + +"@react-spring/web@9.3.1": + version "9.3.1" + resolved "https://registry.yarnpkg.com/@react-spring/web/-/web-9.3.1.tgz#5b377ba7ad52e746c2b59e2738c021de3f219d0b" + integrity sha512-sisZIgFGva/Z+xKWPSfXpukF0AP3kR9ALTxlHL87fVotMUCJX5vtH/YlVcywToEFwTHKt3MpI5Wy2M+vgVEeaw== + dependencies: + "@react-spring/animated" "~9.3.0" + "@react-spring/core" "~9.3.0" + "@react-spring/shared" "~9.3.0" + "@react-spring/types" "~9.3.0" + +"@types/node@^18.0.0": + version "18.0.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.0.0.tgz#67c7b724e1bcdd7a8821ce0d5ee184d3b4dd525a" + integrity sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/prop-types@*", "@types/prop-types@^15.7.5": + version "15.7.5" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/react-is@^16.7.1 || ^17.0.0": + version "17.0.3" + resolved "https://registry.yarnpkg.com/@types/react-is/-/react-is-17.0.3.tgz#2d855ba575f2fc8d17ef9861f084acc4b90a137a" + integrity sha512-aBTIWg1emtu95bLTLx0cpkxwGW3ueZv71nE2YFBpL8k/z5czEW8yYpOo8Dp+UUAFAtKwNaOsh/ioSeQnWlZcfw== + dependencies: + "@types/react" "*" + +"@types/react-transition-group@^4.4.4": + version "4.4.4" + resolved "https://registry.yarnpkg.com/@types/react-transition-group/-/react-transition-group-4.4.4.tgz#acd4cceaa2be6b757db61ed7b432e103242d163e" + integrity sha512-7gAPz7anVK5xzbeQW9wFBDg7G++aPLAFY0QaSMOou9rJZpbuI58WAuJrgu+qR92l61grlnCUe7AFX8KGahAgug== + dependencies: + "@types/react" "*" + +"@types/react@*", "@types/react@^18.0.14": + version "18.0.14" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.14.tgz#e016616ffff51dba01b04945610fe3671fdbe06d" + integrity sha512-x4gGuASSiWmo0xjDLpm5mPb52syZHJx02VKbqUKdLmKtAwIh63XClGsiTI1K6DO5q7ox4xAsQrU+Gl3+gGXF9Q== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity 
sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@uniswap/token-lists@^1.0.0-beta.27": + version "1.0.0-beta.30" + resolved "https://registry.yarnpkg.com/@uniswap/token-lists/-/token-lists-1.0.0-beta.30.tgz#2103ca23b8007c59ec71718d34cdc97861c409e5" + integrity sha512-HwY2VvkQ8lNR6ks5NqQfAtg+4IZqz3KV1T8d2DlI8emIn9uMmaoFbIOg0nzjqAVKKnZSbMTRRtUoAh6mmjRvog== + +"@usedapp/core@1.0.9": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@usedapp/core/-/core-1.0.9.tgz#f0f08d75be420d9377b3853a4aa99b4e99761cc3" + integrity sha512-vGugFfm55R99mwuJXh1enpiOgDSWOZ2akZ8E2nFJhXzqK6WlTkP7zZuKatlde10X7dLbVC2FTCx3ZhrtLWilIA== + dependencies: + "@metamask/detect-provider" "^1.2.0" + "@uniswap/token-lists" "^1.0.0-beta.27" + fetch-mock "^9.11.0" + lodash.merge "^4.6.2" + lodash.pickby "^4.6.0" + nanoid "3.1.22" + +aes-js@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/aes-js/-/aes-js-3.0.0.tgz#e21df10ad6c2053295bcbb8dab40b09dbea87e4d" + integrity sha512-H7wUZRn8WpTq9jocdxQ2c8x2sKo9ZVmzfRE13GiNJXfp7NcKYEdvl3vspKjXox6RIG2VtaRe4JFvxG4rqp2Zuw== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +axios@^0.27.2: + version "0.27.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.27.2.tgz#207658cc8621606e586c85db4b41a750e756d972" + integrity sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ== + dependencies: + follow-redirects "^1.14.9" + form-data "^4.0.0" + +babel-plugin-macros@^2.6.1: + version "2.8.0" + resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz#0f958a7cc6556b1e65344465d99111a1e5e10138" + integrity sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg== + dependencies: + "@babel/runtime" "^7.7.2" + cosmiconfig "^6.0.0" + resolve "^1.12.0" + +bech32@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/bech32/-/bech32-1.1.4.tgz#e38c9f37bf179b8eb16ae3a772b40c356d4832e9" + integrity sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ== + +bn.js@^4.11.9: + version "4.12.0" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" + integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== + +bn.js@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" + integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== + +brorand@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" + integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w== + +browserslist@^4.20.2: + version "4.20.4" + resolved 
"https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.4.tgz#98096c9042af689ee1e0271333dbc564b8ce4477" + integrity sha512-ok1d+1WpnU24XYN7oC3QWgTyMhY/avPJ/r9T00xxvUOIparA/gc+UPUMaod3i+G6s+nI2nUb9xZ5k794uIwShw== + dependencies: + caniuse-lite "^1.0.30001349" + electron-to-chromium "^1.4.147" + escalade "^3.1.1" + node-releases "^2.0.5" + picocolors "^1.0.0" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +caniuse-lite@^1.0.30001332, caniuse-lite@^1.0.30001349: + version "1.0.30001357" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001357.tgz#dec7fc4158ef6ad24690d0eec7b91f32b8cb1b5d" + integrity sha512-b+KbWHdHePp+ZpNj+RDHFChZmuN+J5EvuQUlee9jOQIUAdhv9uvAZeEtUeLAknXbkiu1uxjQ9NLp1ie894CuWg== + +chalk@^2.0.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +classnames@^2.2.6: + version "2.3.1" + resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" + integrity sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA== + +clsx@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.1.1.tgz#98b3134f9abbdf23b2663491ace13c5c03a73188" + integrity sha512-6/bPho624p3S2pMyvP5kKBPXnI3ufHLObBFCfgx+LkeR5lg2XYy2hqZqUf45ypD8COn2bhgGJSUE+l5dhNBieA== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +convert-source-map@^1.5.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +core-js@^3.0.0: + version "3.23.2" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.23.2.tgz#e07a60ca8b14dd129cabdc3d2551baf5a01c76f0" + integrity sha512-ELJOWxNrJfOH/WK4VJ3Qd+fOqZuOuDNDJz0xG6Bt4mGg2eO/UT9CljCrbqDGovjLKUrGajEEBcoTOc0w+yBYeQ== + +cosmiconfig@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" + integrity 
sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + +css-vendor@^2.0.8: + version "2.0.8" + resolved "https://registry.yarnpkg.com/css-vendor/-/css-vendor-2.0.8.tgz#e47f91d3bd3117d49180a3c935e62e3d9f7f449d" + integrity sha512-x9Aq0XTInxrkuFeHKbYC7zWY8ai7qJ04Kxd9MnvbC1uO5DagxoHQjm4JvG+vCdXOoFtCjbL2XSZfxmoYa9uQVQ== + dependencies: + "@babel/runtime" "^7.8.3" + is-in-browser "^1.0.2" + +csstype@^3.0.2, csstype@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.0.tgz#4ddcac3718d787cf9df0d1b7d15033925c8f29f2" + integrity sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA== + +d3-array@2, d3-array@^2.3.0: + version "2.12.1" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-2.12.1.tgz#e20b41aafcdffdf5d50928004ececf815a465e81" + integrity sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ== + dependencies: + internmap "^1.0.0" + +"d3-color@1 - 2", d3-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-2.0.0.tgz#8d625cab42ed9b8f601a1760a389f7ea9189d62e" + integrity sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ== + +"d3-format@1 - 2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-2.0.0.tgz#a10bcc0f986c372b729ba447382413aabf5b0767" + integrity sha512-Ab3S6XuE/Q+flY96HXT0jOXcM4EAClYFnRGY5zsjRGNy6qCYrQsMffs7cV5Q9xejb35zxW5hf/guKw34kvIKsA== + +d3-format@^1.4.4: + version "1.4.5" + resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-1.4.5.tgz#374f2ba1320e3717eb74a9356c67daee17a7edb4" + integrity sha512-J0piedu6Z8iB6TbIGfZgDzfXxUFN3qQRMofy2oPdXzQibYGqPB/9iMcxr/TGalU+2RsyDO+U4f33id8tbnSRMQ== + +"d3-interpolate@1 - 2", "d3-interpolate@1.2.0 - 2", d3-interpolate@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-2.0.1.tgz#98be499cfb8a3b94d4ff616900501a64abc91163" + integrity sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ== + dependencies: + d3-color "1 - 2" + +d3-path@1: + version "1.0.9" + resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-1.0.9.tgz#48c050bb1fe8c262493a8caf5524e3e9591701cf" + integrity sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg== + +d3-scale-chromatic@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-2.0.0.tgz#c13f3af86685ff91323dc2f0ebd2dabbd72d8bab" + integrity sha512-LLqy7dJSL8yDy7NRmf6xSlsFZ6zYvJ4BcWFE4zBrOPnQERv9zj24ohnXKRbyi9YHnYV+HN1oEO3iFK971/gkzA== + dependencies: + d3-color "1 - 2" + d3-interpolate "1 - 2" + +d3-scale@^3.2.3: + version "3.3.0" + resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-3.3.0.tgz#28c600b29f47e5b9cd2df9749c206727966203f3" + integrity sha512-1JGp44NQCt5d1g+Yy+GeOnZP7xHo0ii8zsQp6PGzd+C1/dl0KGsp9A7Mxwp+1D1o4unbTTxVdU/ZOIEBoeZPbQ== + dependencies: + d3-array "^2.3.0" + d3-format "1 - 2" + d3-interpolate "1.2.0 - 2" + d3-time "^2.1.1" + d3-time-format "2 - 3" + +d3-shape@^1.2.2, d3-shape@^1.3.5: + version "1.3.7" + resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-1.3.7.tgz#df63801be07bc986bc54f63789b4fe502992b5d7" + integrity 
sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw== + dependencies: + d3-path "1" + +"d3-time-format@2 - 3", d3-time-format@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-3.0.0.tgz#df8056c83659e01f20ac5da5fdeae7c08d5f1bb6" + integrity sha512-UXJh6EKsHBTjopVqZBhFysQcoXSv/5yLONZvkQ5Kk3qbwiUYkdX17Xa1PT6U1ZWXGGfB1ey5L8dKMlFq2DO0Ag== + dependencies: + d3-time "1 - 2" + +"d3-time@1 - 2", d3-time@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-2.1.1.tgz#e9d8a8a88691f4548e68ca085e5ff956724a6682" + integrity sha512-/eIQe/eR4kCQwq7yxi7z4c6qEXf2IYGcjoWB5OOQy4Tq9Uv39/947qlDcN2TLkiTzQWzvnsuYPB9TrWaNfipKQ== + dependencies: + d3-array "2" + +d3-time@^1.0.11: + version "1.1.0" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-1.1.0.tgz#b1e19d307dae9c900b7e5b25ffc5dcc249a8a0f1" + integrity sha512-Xh0isrZ5rPYYdqhAVk8VLnMEidhz5aP7htAADH6MfzgmmicPkTo8LhkLxci61/lCB7n7UmE3bN0leRt+qvkLxA== + +debug@^4.1.0, debug@^4.1.1: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +dom-helpers@^5.0.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.1.tgz#d9400536b2bf8225ad98fe052e029451ac40e902" + integrity sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA== + dependencies: + "@babel/runtime" "^7.8.7" + csstype "^3.0.2" + +electron-to-chromium@^1.4.147: + version "1.4.162" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.162.tgz#69f8b900477208544a6e2a6e9bd3dc9e73163ed8" + integrity sha512-JrMk3tR2rnBojfAipp9nGh/vcWyBHeNsAVBqehtk4vq0o1bE4sVw19ICeidNx3u0i2yg4X8BvyUIM/yo2vO9aA== + +elliptic@6.5.4: + version "6.5.4" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" + integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== + dependencies: + bn.js "^4.11.9" + brorand "^1.1.0" + hash.js "^1.0.0" + hmac-drbg "^1.0.1" + inherits "^2.0.4" + minimalistic-assert "^1.0.1" + minimalistic-crypto-utils "^1.0.1" + +enhanced-resolve@^5.7.0: + version "5.9.3" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz#44a342c012cbc473254af5cc6ae20ebd0aae5d88" + integrity sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity 
sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +ethers@^5.6.9: + version "5.6.9" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.6.9.tgz#4e12f8dfcb67b88ae7a78a9519b384c23c576a4d" + integrity sha512-lMGC2zv9HC5EC+8r429WaWu3uWJUCgUCt8xxKCFqkrFuBDZXDYIdzDUECxzjf2BMF8IVBByY1EBoGSL3RTm8RA== + dependencies: + "@ethersproject/abi" "5.6.4" + "@ethersproject/abstract-provider" "5.6.1" + "@ethersproject/abstract-signer" "5.6.2" + "@ethersproject/address" "5.6.1" + "@ethersproject/base64" "5.6.1" + "@ethersproject/basex" "5.6.1" + "@ethersproject/bignumber" "5.6.2" + "@ethersproject/bytes" "5.6.1" + "@ethersproject/constants" "5.6.1" + "@ethersproject/contracts" "5.6.2" + "@ethersproject/hash" "5.6.1" + "@ethersproject/hdnode" "5.6.2" + "@ethersproject/json-wallets" "5.6.1" + "@ethersproject/keccak256" "5.6.1" + "@ethersproject/logger" "5.6.0" + "@ethersproject/networks" "5.6.4" + "@ethersproject/pbkdf2" "5.6.1" + "@ethersproject/properties" "5.6.0" + "@ethersproject/providers" "5.6.8" + "@ethersproject/random" "5.6.1" + "@ethersproject/rlp" "5.6.1" + "@ethersproject/sha2" "5.6.1" + "@ethersproject/signing-key" "5.6.2" + "@ethersproject/solidity" "5.6.1" + "@ethersproject/strings" "5.6.1" + "@ethersproject/transactions" "5.6.2" + "@ethersproject/units" "5.6.1" + "@ethersproject/wallet" "5.6.2" + "@ethersproject/web" "5.6.1" + "@ethersproject/wordlists" "5.6.1" + +fetch-mock@^9.11.0: + version "9.11.0" + resolved "https://registry.yarnpkg.com/fetch-mock/-/fetch-mock-9.11.0.tgz#371c6fb7d45584d2ae4a18ee6824e7ad4b637a3f" + integrity sha512-PG1XUv+x7iag5p/iNHD4/jdpxL9FtVSqRMUQhPab4hVDt80T1MH5ehzVrL2IdXO9Q2iBggArFvPqjUbHFuI58Q== + dependencies: + "@babel/core" "^7.0.0" + "@babel/runtime" "^7.0.0" + core-js "^3.0.0" + debug "^4.1.1" + glob-to-regexp "^0.4.0" + is-subset "^0.1.1" + lodash.isequal "^4.5.0" + path-to-regexp "^2.2.1" + querystring "^0.2.0" + whatwg-url "^6.5.0" + +find-root@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" + integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== + +follow-redirects@^1.14.9: + version "1.15.1" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" + integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== + +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +function-bind@^1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +glob-to-regexp@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +graceful-fs@^4.2.4: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hash.js@1.1.7, hash.js@^1.0.0, hash.js@^1.0.3: + version "1.1.7" + resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" + integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== + dependencies: + inherits "^2.0.3" + minimalistic-assert "^1.0.1" + +hmac-drbg@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" + integrity sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg== + dependencies: + hash.js "^1.0.3" + minimalistic-assert "^1.0.0" + minimalistic-crypto-utils "^1.0.1" + +hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" + integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + dependencies: + react-is "^16.7.0" + +hyphenate-style-name@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz#691879af8e220aea5750e8827db4ef62a54e361d" + integrity sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ== + +import-fresh@^3.1.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + 
resolve-from "^4.0.0" + +inherits@^2.0.3, inherits@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +internmap@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/internmap/-/internmap-1.0.1.tgz#0017cc8a3b99605f0302f2b198d272e015e5df95" + integrity sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw== + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-core-module@^2.9.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" + integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== + dependencies: + has "^1.0.3" + +is-in-browser@^1.0.2, is-in-browser@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/is-in-browser/-/is-in-browser-1.1.3.tgz#56ff4db683a078c6082eb95dad7dc62e1d04f835" + integrity sha512-FeXIBgG/CPGd/WUxuEyvgGTEfwiG9Z4EKGxjNMRqviiIIfsmgrpnHLffEDdwUHqNva1VEW91o3xBT/m8Elgl9g== + +is-subset@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-subset/-/is-subset-0.1.1.tgz#8a59117d932de1de00f245fcdd39ce43f1e939a6" + integrity sha512-6Ybun0IkarhmEqxXCNw/C0bna6Zb/TkfUX9UbwJtK6ObwAVCxmAP308WWTHviM/zAqXk05cdhYsUsZeGQh99iw== + +isomorphic-fetch@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4" + integrity sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA== + dependencies: + node-fetch "^2.6.1" + whatwg-fetch "^3.4.1" + +js-sha3@0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.8.0.tgz#b9b7a5da73afad7dedd0f8c463954cbde6818840" + integrity sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +jss-plugin-camel-case@^10.8.2: + version "10.9.0" + resolved 
"https://registry.yarnpkg.com/jss-plugin-camel-case/-/jss-plugin-camel-case-10.9.0.tgz#4921b568b38d893f39736ee8c4c5f1c64670aaf7" + integrity sha512-UH6uPpnDk413/r/2Olmw4+y54yEF2lRIV8XIZyuYpgPYTITLlPOsq6XB9qeqv+75SQSg3KLocq5jUBXW8qWWww== + dependencies: + "@babel/runtime" "^7.3.1" + hyphenate-style-name "^1.0.3" + jss "10.9.0" + +jss-plugin-default-unit@^10.8.2: + version "10.9.0" + resolved "https://registry.yarnpkg.com/jss-plugin-default-unit/-/jss-plugin-default-unit-10.9.0.tgz#bb23a48f075bc0ce852b4b4d3f7582bc002df991" + integrity sha512-7Ju4Q9wJ/MZPsxfu4T84mzdn7pLHWeqoGd/D8O3eDNNJ93Xc8PxnLmV8s8ZPNRYkLdxZqKtm1nPQ0BM4JRlq2w== + dependencies: + "@babel/runtime" "^7.3.1" + jss "10.9.0" + +jss-plugin-global@^10.8.2: + version "10.9.0" + resolved "https://registry.yarnpkg.com/jss-plugin-global/-/jss-plugin-global-10.9.0.tgz#fc07a0086ac97aca174e37edb480b69277f3931f" + integrity sha512-4G8PHNJ0x6nwAFsEzcuVDiBlyMsj2y3VjmFAx/uHk/R/gzJV+yRHICjT4MKGGu1cJq2hfowFWCyrr/Gg37FbgQ== + dependencies: + "@babel/runtime" "^7.3.1" + jss "10.9.0" + +jss-plugin-nested@^10.8.2: + version "10.9.0" + resolved "https://registry.yarnpkg.com/jss-plugin-nested/-/jss-plugin-nested-10.9.0.tgz#cc1c7d63ad542c3ccc6e2c66c8328c6b6b00f4b3" + integrity sha512-2UJnDrfCZpMYcpPYR16oZB7VAC6b/1QLsRiAutOt7wJaaqwCBvNsosLEu/fUyKNQNGdvg2PPJFDO5AX7dwxtoA== + dependencies: + "@babel/runtime" "^7.3.1" + jss "10.9.0" + tiny-warning "^1.0.2" + +jss-plugin-props-sort@^10.8.2: + version "10.9.0" + resolved "https://registry.yarnpkg.com/jss-plugin-props-sort/-/jss-plugin-props-sort-10.9.0.tgz#30e9567ef9479043feb6e5e59db09b4de687c47d" + integrity sha512-7A76HI8bzwqrsMOJTWKx/uD5v+U8piLnp5bvru7g/3ZEQOu1+PjHvv7bFdNO3DwNPC9oM0a//KwIJsIcDCjDzw== + dependencies: + "@babel/runtime" "^7.3.1" + jss "10.9.0" + +jss-plugin-rule-value-function@^10.8.2: + version "10.9.0" + resolved "https://registry.yarnpkg.com/jss-plugin-rule-value-function/-/jss-plugin-rule-value-function-10.9.0.tgz#379fd2732c0746fe45168011fe25544c1a295d67" + integrity sha512-IHJv6YrEf8pRzkY207cPmdbBstBaE+z8pazhPShfz0tZSDtRdQua5jjg6NMz3IbTasVx9FdnmptxPqSWL5tyJg== + dependencies: + "@babel/runtime" "^7.3.1" + jss "10.9.0" + tiny-warning "^1.0.2" + +jss-plugin-vendor-prefixer@^10.8.2: + version "10.9.0" + resolved "https://registry.yarnpkg.com/jss-plugin-vendor-prefixer/-/jss-plugin-vendor-prefixer-10.9.0.tgz#aa9df98abfb3f75f7ed59a3ec50a5452461a206a" + integrity sha512-MbvsaXP7iiVdYVSEoi+blrW+AYnTDvHTW6I6zqi7JcwXdc6I9Kbm234nEblayhF38EftoenbM+5218pidmC5gA== + dependencies: + "@babel/runtime" "^7.3.1" + css-vendor "^2.0.8" + jss "10.9.0" + +jss@10.9.0, jss@^10.8.2: + version "10.9.0" + resolved "https://registry.yarnpkg.com/jss/-/jss-10.9.0.tgz#7583ee2cdc904a83c872ba695d1baab4b59c141b" + integrity sha512-YpzpreB6kUunQBbrlArlsMpXYyndt9JATbt95tajx0t4MTJJcCJdd4hdNpHmOIDiUJrF/oX5wtVFrS3uofWfGw== + dependencies: + "@babel/runtime" "^7.3.1" + csstype "^3.0.2" + is-in-browser "^1.1.3" + tiny-warning "^1.0.2" + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +lodash.isequal@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0" + integrity sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ== + +lodash.merge@^4.6.2: + version 
"4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.pickby@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/lodash.pickby/-/lodash.pickby-4.6.0.tgz#7dea21d8c18d7703a27c704c15d3b84a67e33aff" + integrity sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== + +lodash@^4.17.21: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimalistic-crypto-utils@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" + integrity sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +nanoid@3.1.22: + version "3.1.22" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.22.tgz#b35f8fb7d151990a8aebd5aa5015c03cf726f844" + integrity sha512-/2ZUaJX2ANuLtTvqTlgqBQNJoQO398KyJgZloL0PZkC0dpysjncRUPsFe3DUPzz/y3h+u7C46np8RMuvF3jsSQ== + +nanoid@^3.1.30: + version "3.3.4" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== + +next-transpile-modules@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/next-transpile-modules/-/next-transpile-modules-9.0.0.tgz#133b1742af082e61cc76b02a0f12ffd40ce2bf90" + integrity 
sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ== + dependencies: + enhanced-resolve "^5.7.0" + escalade "^3.1.1" + +next@12: + version "12.1.6" + resolved "https://registry.yarnpkg.com/next/-/next-12.1.6.tgz#eb205e64af1998651f96f9df44556d47d8bbc533" + integrity sha512-cebwKxL3/DhNKfg9tPZDQmbRKjueqykHHbgaoG4VBRH3AHQJ2HO0dbKFiS1hPhe1/qgc2d/hFeadsbPicmLD+A== + dependencies: + "@next/env" "12.1.6" + caniuse-lite "^1.0.30001332" + postcss "8.4.5" + styled-jsx "5.0.2" + optionalDependencies: + "@next/swc-android-arm-eabi" "12.1.6" + "@next/swc-android-arm64" "12.1.6" + "@next/swc-darwin-arm64" "12.1.6" + "@next/swc-darwin-x64" "12.1.6" + "@next/swc-linux-arm-gnueabihf" "12.1.6" + "@next/swc-linux-arm64-gnu" "12.1.6" + "@next/swc-linux-arm64-musl" "12.1.6" + "@next/swc-linux-x64-gnu" "12.1.6" + "@next/swc-linux-x64-musl" "12.1.6" + "@next/swc-win32-arm64-msvc" "12.1.6" + "@next/swc-win32-ia32-msvc" "12.1.6" + "@next/swc-win32-x64-msvc" "12.1.6" + +node-fetch@^2.6.1: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +node-releases@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.5.tgz#280ed5bc3eba0d96ce44897d8aee478bfb3d9666" + integrity sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q== + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-json@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@^2.2.1: + version "2.4.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-2.4.0.tgz#35ce7f333d5616f1c1e1bfe266c3aba2e5b2e704" + integrity sha512-G6zHoVqC6GGTQkZwF4lkuEyMbVOjoBKAEybQUypI1WTkqinCOrq2x6U2+phkJ1XsEMTy4LjtwPI7HW+NVrRR2w== + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity 
sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +postcss@8.4.5: + version "8.4.5" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.5.tgz#bae665764dfd4c6fcc24dc0fdf7e7aa00cc77f95" + integrity sha512-jBDboWM8qpaqwkMwItqTQTiFikhs/67OYVvblFFTM7MrZjt6yMKd6r2kgXizEbTTljacm4NldIlZnhbjr84QYg== + dependencies: + nanoid "^3.1.30" + picocolors "^1.0.0" + source-map-js "^1.0.1" + +prettier@^2.6.2: + version "2.7.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" + integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== + +prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: + version "15.8.1" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +qrcode.react@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/qrcode.react/-/qrcode.react-3.0.2.tgz#7ceaea165aa7066253ef670a25bf238eaec4eb9e" + integrity sha512-8F3SGxSkNb3fMIHdlseqjFjLbsPrF3WvF/1MOboSUUHytT537W8f/FtbdA3XFIHDrc+TrRBjTI/QLmwhAIGWWw== + +querystring@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.1.tgz#40d77615bb09d16902a85c3e38aa8b5ed761c2dd" + integrity sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg== + +react-dom@^18.2.0: + version "18.2.0" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" + integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.23.0" + +react-is@^16.13.1, react-is@^16.7.0: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^17.0.2: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + +react-lifecycles-compat@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz#4f1a273afdfc8f3488a8c516bfda78f872352362" + integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA== + +react-number-format@^4.4.4: + version "4.9.3" + resolved "https://registry.yarnpkg.com/react-number-format/-/react-number-format-4.9.3.tgz#338500fe9c61b1ac73c8d6dff4ec97dd13fd2b50" + integrity sha512-am1A1xYAbENuKJ+zpM7V+B1oRTSeOHYltqVKExznIVFweBzhLmOBmyb1DfIKjHo90E0bo1p3nzVJ2NgS5xh+sQ== + dependencies: + prop-types "^15.7.2" + +react-transition-group@^4.4.2: + version "4.4.2" + resolved 
"https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.2.tgz#8b59a56f09ced7b55cbd53c36768b922890d5470" + integrity sha512-/RNYfRAMlZwDSr6z4zNKV6xu53/e2BuaBbGhbyYIXTrmgu/bGHzmqOs7mJSJBHy9Ud+ApHx3QjrkKSp1pxvlFg== + dependencies: + "@babel/runtime" "^7.5.5" + dom-helpers "^5.0.1" + loose-envify "^1.4.0" + prop-types "^15.6.2" + +react@^18.2.0: + version "18.2.0" + resolved "https://registry.yarnpkg.com/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" + integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== + dependencies: + loose-envify "^1.1.0" + +regenerator-runtime@^0.13.4: + version "0.13.9" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +reselect@^4.1.5: + version "4.1.6" + resolved "https://registry.yarnpkg.com/reselect/-/reselect-4.1.6.tgz#19ca2d3d0b35373a74dc1c98692cdaffb6602656" + integrity sha512-ZovIuXqto7elwnxyXbBtCPo9YFEr3uJqj2rRbcOOog1bmu2Ag85M4hixSwFWyaBMKXNgvPaJ9OSu9SkBPIeJHQ== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve@^1.12.0: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +rifm@^0.12.1: + version "0.12.1" + resolved "https://registry.yarnpkg.com/rifm/-/rifm-0.12.1.tgz#8fa77f45b7f1cda2a0068787ac821f0593967ac4" + integrity sha512-OGA1Bitg/dSJtI/c4dh90svzaUPt228kzFsUkJbtA2c964IqEAwWXeL9ZJi86xWv3j5SMqRvGULl7bA6cK0Bvg== + +safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +scheduler@^0.23.0: + version "0.23.0" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" + integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== + dependencies: + loose-envify "^1.1.0" + +scrypt-js@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/scrypt-js/-/scrypt-js-3.0.1.tgz#d314a57c2aef69d1ad98a138a21fe9eafa9ee312" + integrity sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA== + +semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +source-map-js@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map@^0.5.7: + version "0.5.7" + resolved 
"https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== + +styled-jsx@5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.0.2.tgz#ff230fd593b737e9e68b630a694d460425478729" + integrity sha512-LqPQrbBh3egD57NBcHET4qcgshPks+yblyhPlH2GY8oaDgKs8SK4C3dBh3oSJjgzJ3G5t1SYEZGHkP+QEpX9EQ== + +stylis@4.0.13: + version "4.0.13" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.0.13.tgz#f5db332e376d13cc84ecfe5dace9a2a51d954c91" + integrity sha512-xGPXiFVl4YED9Jh7Euv2V220mriG9u4B2TA6Ybjc1catrstKD2PpIdU3U0RKpkVBC2EhmL/F0sPCr9vrFTNRag== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +tiny-warning@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754" + integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +tr46@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== + dependencies: + punycode "^2.1.0" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +typescript@^4.1.3: + version "4.7.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.7.4.tgz#1a88596d1cf47d59507a1bcdfb5b9dfe4d488235" + integrity sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ== + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity 
sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +whatwg-fetch@^3.4.1: + version "3.6.2" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" + integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +whatwg-url@^6.5.0: + version "6.5.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-6.5.0.tgz#f2df02bff176fd65070df74ad5ccbb5a199965a8" + integrity sha512-rhRZRqx/TLJQWUpQ6bmrt2UV4f0HCQ463yQuONJqC6fO2VoEb1pTYddbe59SkYq87aoM5A3bdhMZiUiVws+fzQ== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +ws@7.4.6: + version "7.4.6" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" + integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== + +yaml@^1.7.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== diff --git a/go.mod b/go.mod index 385d97f95e..d7d2f80d68 100644 --- a/go.mod +++ b/go.mod @@ -2,67 +2,124 @@ module github.com/c9s/bbgo -go 1.13 +go 1.17 require ( github.com/DATA-DOG/go-sqlmock v1.5.0 - github.com/adshao/go-binance/v2 v2.2.1-0.20210119141603-20ceb26d876b - github.com/c9s/rockhopper v1.2.1-0.20210115022144-cc77e66fc34f + github.com/Masterminds/squirrel v1.5.3 + github.com/adshao/go-binance/v2 v2.3.5 + github.com/c9s/requestgen v1.3.0 + github.com/c9s/rockhopper v1.2.2-0.20220617053729-ffdc87df194b github.com/codingconcepts/env v0.0.0-20200821220118-a8fbf8d84482 + github.com/evanphx/json-patch/v5 v5.6.0 + github.com/fatih/camelcase v1.0.0 + github.com/fatih/color v1.13.0 + github.com/gertd/go-pluralize v0.2.1 + github.com/gin-contrib/cors v1.3.1 + github.com/gin-gonic/gin v1.7.0 + github.com/go-redis/redis/v8 v8.8.0 + github.com/go-sql-driver/mysql v1.6.0 + github.com/gofrs/flock v0.8.1 + github.com/google/uuid v1.3.0 + github.com/gorilla/websocket v1.5.0 + github.com/jmoiron/sqlx v1.3.4 + github.com/joho/godotenv v1.3.0 + github.com/leekchan/accounting v0.0.0-20191218023648-17a4ce5f94d4 + github.com/lestrrat-go/file-rotatelogs v2.2.0+incompatible + github.com/mattn/go-shellwords v1.0.12 + github.com/muesli/clusters v0.0.0-20180605185049-a07a36e67d36 + github.com/muesli/kmeans v0.3.0 + github.com/pkg/errors v0.9.1 + github.com/pquerna/otp v1.3.0 + github.com/prometheus/client_golang v1.11.0 + github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5 + github.com/robfig/cron/v3 v3.0.0 + github.com/sajari/regression v1.0.1 + github.com/sirupsen/logrus v1.8.1 + github.com/slack-go/slack v0.10.1 + github.com/spf13/cobra v1.1.1 + github.com/spf13/pflag v1.0.5 + github.com/spf13/viper v1.7.1 + github.com/stretchr/testify v1.7.0 + github.com/valyala/fastjson v1.5.1 + github.com/webview/webview v0.0.0-20210216142346-e0bfdf0e5d90 + github.com/x-cray/logrus-prefixed-formatter v0.5.2 + github.com/zserge/lorca v0.1.9 + go.uber.org/multierr v1.7.0 + golang.org/x/time 
v0.0.0-20201208040808-7e3f01d25324 + gonum.org/v1/gonum v0.8.1 + google.golang.org/grpc v1.45.0 + google.golang.org/protobuf v1.28.0 + gopkg.in/tucnak/telebot.v2 v2.5.0 + gopkg.in/yaml.v3 v3.0.1 +) + +require ( + github.com/beorn7/perks v1.0.1 // indirect + github.com/bitly/go-simplejson v0.5.0 // indirect + github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc // indirect + github.com/cespare/xxhash/v2 v2.1.2 // indirect + github.com/cockroachdb/apd v1.1.0 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/denisenkom/go-mssqldb v0.12.2 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/fastly/go-utils v0.0.0-20180712184237-d95a45783239 // indirect - github.com/gin-gonic/gin v1.6.3 + github.com/fsnotify/fsnotify v1.4.9 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-playground/locales v0.13.0 // indirect + github.com/go-playground/universal-translator v0.17.0 // indirect github.com/go-playground/validator/v10 v10.4.1 // indirect - github.com/go-redis/redis/v8 v8.4.0 - github.com/go-sql-driver/mysql v1.5.0 github.com/go-test/deep v1.0.6 // indirect - github.com/golang/protobuf v1.4.3 // indirect - github.com/google/uuid v1.1.2 - github.com/gorilla/websocket v1.4.2 + github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect + github.com/golang-sql/sqlexp v0.1.0 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/jehiah/go-strftime v0.0.0-20171201141054-1d33003b3869 // indirect - github.com/jmoiron/sqlx v1.2.0 - github.com/json-iterator/go v1.1.10 // indirect - github.com/leekchan/accounting v0.0.0-20191218023648-17a4ce5f94d4 + github.com/json-iterator/go v1.1.11 // indirect + github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect + github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect github.com/leodido/go-urn v1.2.1 // indirect - github.com/lestrrat-go/file-rotatelogs v2.2.0+incompatible github.com/lestrrat-go/strftime v1.0.0 // indirect + github.com/lib/pq v1.10.6 // indirect github.com/magiconair/properties v1.8.4 // indirect - github.com/mattn/go-colorable v0.1.2 // indirect - github.com/mattn/go-isatty v0.0.12 // indirect - github.com/mattn/go-sqlite3 v2.0.3+incompatible // indirect + github.com/mattn/go-colorable v0.1.9 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mattn/go-sqlite3 v1.14.13 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect + github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect github.com/mitchellh/mapstructure v1.4.1 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.1 // indirect + github.com/muesli/clusters v0.0.0-20180605185049-a07a36e67d36 // indirect + github.com/muesli/kmeans v0.3.0 // indirect github.com/pelletier/go-toml v1.8.1 // indirect - github.com/pkg/errors v0.9.1 - github.com/pquerna/otp v1.3.0 - github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5 - github.com/robfig/cron/v3 v3.0.0 + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/common v0.32.1 // indirect + github.com/prometheus/procfs v0.7.3 // indirect + github.com/russross/blackfriday/v2 v2.0.1 // indirect 
github.com/shopspring/decimal v1.2.0 // indirect - github.com/sirupsen/logrus v1.7.0 - github.com/slack-go/slack v0.6.6-0.20200602212211-b04b8521281b + github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect github.com/spf13/afero v1.5.1 // indirect github.com/spf13/cast v1.3.1 // indirect - github.com/spf13/cobra v1.1.1 github.com/spf13/jwalterweatherman v1.1.0 // indirect - github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.7.1 - github.com/stretchr/testify v1.6.1 + github.com/subosito/gotenv v1.2.0 // indirect github.com/tebeka/strftime v0.1.3 // indirect - github.com/ugorji/go v1.2.3 // indirect - github.com/valyala/fastjson v1.5.1 - github.com/x-cray/logrus-prefixed-formatter v0.5.2 - golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad // indirect - golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 // indirect - golang.org/x/text v0.3.5 // indirect - golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 - gonum.org/v1/gonum v0.8.1 - google.golang.org/protobuf v1.25.0 // indirect - gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect + github.com/ugorji/go/codec v1.2.3 // indirect + github.com/ziutek/mymysql v1.5.4 // indirect + go.opentelemetry.io/otel v0.19.0 // indirect + go.opentelemetry.io/otel/metric v0.19.0 // indirect + go.opentelemetry.io/otel/trace v0.19.0 // indirect + go.uber.org/atomic v1.9.0 // indirect + golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e // indirect + golang.org/x/net v0.0.0-20220403103023-749bd193bc2b // indirect + golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c // indirect + golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect + golang.org/x/text v0.3.7 // indirect + golang.org/x/tools v0.1.9 // indirect + google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf // indirect gopkg.in/ini.v1 v1.62.0 // indirect - gopkg.in/tucnak/telebot.v2 v2.3.5 gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b ) - -// replace ( -// github.com/adshao/go-binance/v2 => ../../adshao/go-binance/v2 -// github.com/c9s/rockhopper => ../rockhopper -// ) diff --git a/go.sum b/go.sum index bc917be3ac..e1615e0218 100644 --- a/go.sum +++ b/go.sum @@ -5,30 +5,60 @@ cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6A cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod 
h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/Azure/azure-sdk-for-go/sdk/azcore v0.19.0/go.mod h1:h6H6c8enJmmocHUbLiiGY6sx7f9i+X3m1CHdd5c6Rdw= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod h1:HcM1YX14R7CJcghJGOYCgdezslRSVzqwLf/q+4Y2r/0= +github.com/Azure/azure-sdk-for-go/sdk/internal v0.7.0/go.mod h1:yqy467j36fJxcRV2TzfVZ1pCb5vxm4BtZPUdYWe/Xo8= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= +github.com/Masterminds/squirrel v1.5.3 h1:YPpoceAcxuzIljlr5iWpNKaql7hLeG1KLSrhvdHpkZc= +github.com/Masterminds/squirrel v1.5.3/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/adshao/go-binance/v2 v2.2.1-0.20210108025425-9a582c63144e h1:e5AeuM0NLP6mfR6rU/9yDo9Z3yjgfwSCsuBqkqBjpvA= -github.com/adshao/go-binance/v2 v2.2.1-0.20210108025425-9a582c63144e/go.mod h1:o+84WK3DQxq9vEKV9ncRcQi+J7RFCGhM27osbECZiJQ= -github.com/adshao/go-binance/v2 v2.2.1-0.20210119141603-20ceb26d876b h1:5GJlWxRjR3y7nPt/tIRCVcL4x/82yXpTBXsXJoLT0FY= -github.com/adshao/go-binance/v2 v2.2.1-0.20210119141603-20ceb26d876b/go.mod h1:o+84WK3DQxq9vEKV9ncRcQi+J7RFCGhM27osbECZiJQ= +github.com/adshao/go-binance/v2 v2.3.5 h1:WVYZecm0w8l14YoWlnKZj6xxZT2AKMTHpMQSqIX1xxA= +github.com/adshao/go-binance/v2 v2.3.5/go.mod h1:8Pg/FGTLyAhq8QXA0IkoReKyRpoxJcK3LVujKDAZV/c= github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template 
v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bitly/go-simplejson v0.5.0 h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y= github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= @@ -37,14 +67,26 @@ github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4Yn github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI= github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= -github.com/c9s/rockhopper v1.2.1-0.20210115022144-cc77e66fc34f h1:n1Ly7178MJj+GQB38q4dV66QktUvzEi2rA7xCtTy6Ck= -github.com/c9s/rockhopper v1.2.1-0.20210115022144-cc77e66fc34f/go.mod h1:KJnQjZSrWA83jjwGF/+O7Y96VCVirYTYEvXJJOc6kMU= +github.com/c9s/requestgen v1.3.0 h1:3cTHvWIlrc37nGEdJLIO07XaVidDeOwcew06csBz++U= +github.com/c9s/requestgen v1.3.0/go.mod h1:5n9FU3hr5307IiXAmbMiZbHYaPiys1u9jCWYexZr9qA= +github.com/c9s/rockhopper v1.2.2-0.20220617053729-ffdc87df194b h1:wT8c03PHLv7+nZUIGqxAzRvIfYHNxMCNVWwvdGkOXTs= +github.com/c9s/rockhopper v1.2.2-0.20220617053729-ffdc87df194b/go.mod h1:EKObf66Cp7erWxym2de+07qNN5T1N9PXxHdh97N44EQ= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod 
h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/codingconcepts/env v0.0.0-20200821220118-a8fbf8d84482 h1:5/aEFreBh9hH/0G+33xtczJCvMaulqsm9nDuu2BZUEo= @@ -54,102 +96,155 @@ github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denisenkom/go-mssqldb v0.9.0 h1:RSohk2RsiZqLZ0zCjtfn3S4Gp4exhpBWHyQ7D0yGjAk= github.com/denisenkom/go-mssqldb v0.9.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= +github.com/denisenkom/go-mssqldb v0.12.2 h1:1OcPn5GBIobjWNd+8yjfHNIaFX14B1pWI3F9HZy5KXw= +github.com/denisenkom/go-mssqldb v0.12.2/go.mod h1:lnIw1mZukFRZDJYQ0Pb833QS2IaC3l5HkEfra2LJ+sk= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= +github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod 
h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= +github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= github.com/fastly/go-utils v0.0.0-20180712184237-d95a45783239 h1:Ghm4eQYC0nEPnSJdVkTrXpu9KtoVCSo1hg7mtI7G9KU= github.com/fastly/go-utils v0.0.0-20180712184237-d95a45783239/go.mod h1:Gdwt2ce0yfBxPvZrHkprdPPTTS3N5rwmLE8T22KBXlw= +github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= +github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/gertd/go-pluralize v0.2.1 h1:M3uASbVjMnTsPb0PNqg+E/24Vwigyo/tvyMTtAlLgiA= +github.com/gertd/go-pluralize v0.2.1/go.mod h1:rbYaKDbsXxmRfr8uygAEKhOWsjyrrqrkHVpZvoOp8zk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/cors v1.3.1 h1:doAsuITavI4IOcd0Y19U4B+O0dNWihRyX//nn4sEmgA= +github.com/gin-contrib/cors v1.3.1/go.mod h1:jjEJ4268OPZUcU7k9Pm653S7lXUGcqMADzFA61xsmDk= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.6.3 h1:ahKqKTFpO5KTPHxWZjEdPScmYaGtLo8Y4DMHoEsnp14= -github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= +github.com/gin-gonic/gin v1.5.0/go.mod h1:Nd6IXA8m5kNZdNEHMBd93KT+mdY3+bewLgRvmCsR2Do= +github.com/gin-gonic/gin v1.7.0 h1:jGB9xAJQ12AIGNB4HguylppmDK1Am9ppF7XnGXXJuoU= +github.com/gin-gonic/gin v1.7.0/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.12.1/go.mod 
h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM= github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.16.0/go.mod h1:1AnU7NaIRDWWzGEKwgtJRd2xk99HeFyHw3yid4rvQIY= github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= -github.com/go-playground/validator/v10 v10.2.0 h1:KgJ0snyC2R9VXYN2rneOtQcw5aHQB1Vv0sFl1UcHBOY= -github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= -github.com/go-redis/redis/v8 v8.4.0 h1:J5NCReIgh3QgUJu398hUncxDExN4gMOHI11NVbVicGQ= -github.com/go-redis/redis/v8 v8.4.0/go.mod h1:A1tbYoHSa1fXwN+//ljcCYYJeLmVrwL9hbQN45Jdy0M= -github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= -github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs= +github.com/go-redis/redis/v8 v8.8.0 h1:fDZP58UN/1RD3DjtTXP/fFZ04TFohSYhjZDkcDe2dnw= +github.com/go-redis/redis/v8 v8.8.0/go.mod h1:F7resOH5Kdug49Otu24RjHWwgK7u9AmtqWMnCV1iP5Y= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-test/deep v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-test/deep v1.0.6 h1:UHSEyLZUwX9Qoi99vVwvewiMC8mM2bf7XEM2nqvzEn8= github.com/go-test/deep v1.0.6/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8= +github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= +github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= +github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= 
+github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3 h1:x95R7cp+rSeeqAMI2knLtQ0DKlaBhv2NrtrOvafPHRo= -github.com/google/go-cmp 
v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v1.0.0/go.mod 
h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -172,27 +267,37 @@ github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0m github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jehiah/go-strftime v0.0.0-20171201141054-1d33003b3869 h1:IPJ3dvxmJ4uczJe5YQdrYB16oTJlGSC/OyZDqUk9xX4= github.com/jehiah/go-strftime v0.0.0-20171201141054-1d33003b3869/go.mod h1:cJ6Cj7dQo+O6GJNiMx+Pa94qKj+TG8ONdKHgMNIyyag= -github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA= -github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= +github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jmoiron/sqlx v1.3.4 h1:wv+0IJZfL5z0uZoUjlpKgHkgaFSYD+r9CfrXjEXsO7w= +github.com/jmoiron/sqlx v1.3.4/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ= +github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/jonboulle/clockwork v0.1.0 h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/gotool v1.0.0/go.mod 
h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -201,9 +306,13 @@ github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfn github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw= github.com/leekchan/accounting v0.0.0-20191218023648-17a4ce5f94d4 h1:KZzDAtJ7ZLm0zSWVhN/zgyB8Ksx5H+P9irwbTcJ9FwI= github.com/leekchan/accounting v0.0.0-20191218023648-17a4ce5f94d4/go.mod h1:3timm6YPhY3YDaGxl0q3eaflX0eoSx3FXn7ckHe4tO0= -github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= +github.com/leodido/go-urn v1.1.0/go.mod h1:+cyI34gQWZcE1eQU7NVgKkkzdXDQHr1dBMtdAPozLkw= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= @@ -214,22 +323,30 @@ github.com/lestrrat-go/file-rotatelogs v2.2.0+incompatible/go.mod h1:ZQnN8lSECae github.com/lestrrat-go/strftime v1.0.0 h1:wZIfTHGdu7TeGu318uLJwuQvTMt9UpRyS+XV2Rc4wo4= github.com/lestrrat-go/strftime v1.0.0/go.mod h1:E1nN3pCbtMSu1yjSVeyuRFVm/U0xoR76fd03sz+Qz4g= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.9.0 h1:L8nSXQQzAYByakOFMTwpjRoHsMJklur4Gi59b6VivR8= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.5 h1:J+gdV2cUmX7ZqL2B0lFcW0m+egaHC2V3lpO8nWxyYiQ= +github.com/lib/pq v1.10.5/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.6 h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs= +github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lucasb-eyer/go-colorful v1.0.2/go.mod h1:0MS4r+7BZKSJ5mw4/S5MPN+qHFF1fYclkSPilDOKW0s= github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.4 h1:8KGKTcQQGm0Kv7vEbKFErAoAOFyyacLStRtQSeYtvkY= github.com/magiconair/properties v1.8.4/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU= -github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= 
+github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= +github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U= -github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/go-sqlite3 v1.14.13 h1:1tj15ngiFfcZzii7yd82foL+ks+ouQcj8j/TPq3fk1I= +github.com/mattn/go-sqlite3 v1.14.13/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= @@ -250,22 +367,35 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= +github.com/muesli/clusters v0.0.0-20180605185049-a07a36e67d36 h1:KMCH+/bbZsAbFgzCXD3aB0DRZXnwAO8NYDmfIfslo+M= +github.com/muesli/clusters v0.0.0-20180605185049-a07a36e67d36/go.mod h1:mw5KDqUj0eLj/6DUNINLVJNoPTFkEuGMHtJsXLviLkY= +github.com/muesli/kmeans v0.3.0 h1:cI2cpeS8m3pm+gTOdzl+7SlzZYSe+x0XoqXUyUvb1ro= +github.com/muesli/kmeans v0.3.0/go.mod h1:eNyybq0tX9/iBEP6EMU4Y7dpmGK0uEhODdZpnG1a/iQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= 
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.14.2 h1:8mVmC9kjFFmA8H4pKMUhcblgifdkOIXPvbhN1T36q1M= -github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/ginkgo v1.15.0/go.mod h1:hF8qUzuuC8DJGygJH3726JnCZX4MYbRB8yFfISqnKUg= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.10.3 h1:gph6h/qe9GSUw1NhH1gp+qb+h8rXD8Cy60Z32Qw3ELA= -github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= +github.com/onsi/gomega v1.10.5/go.mod h1:gza4q3jKQJijlu05nKWRCW/GavJumGt8aNRxWg7mt48= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw= +github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml v1.8.1 h1:1Nf83orprkJyknT6h7zbuEGUEjcyVlCxSUGTENmNCRM= github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= +github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -278,32 +408,56 @@ github.com/pquerna/otp v1.3.0 h1:oJV/SkzR33anKXwQU3Of42rL4wbrffP4uvUf1SvS5Xs= github.com/pquerna/otp v1.3.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0 h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common 
v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5 h1:mZHayPoR0lNmnHyvtYjDeq0zlVHn9K/ZXoy17ylucdo= github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5/go.mod h1:GEXHk5HgEKCvEIIrSpFI3ozzG5xOKA2DVlEX/gGnewM= github.com/robfig/cron/v3 v3.0.0 h1:kQ6Cb7aHOHTSzNVNEhmp8EcWKLb4CbiMW9h9VyIhO4E= github.com/robfig/cron/v3 v3.0.0/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sajari/regression v1.0.1 h1:iTVc6ZACGCkoXC+8NdqH5tIreslDTT/bXxT6OmHR5PE= +github.com/sajari/regression v1.0.1/go.mod h1:NeG/XTW1lYfGY7YV/Z0nYDV/RGh3wxwd1yW46835flM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod 
h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/slack-go/slack v0.6.6-0.20200602212211-b04b8521281b h1:4NIpokK7Rg/k6lSzNQzvGLphpHtfAAaLw9AWHxHQn0w= -github.com/slack-go/slack v0.6.6-0.20200602212211-b04b8521281b/go.mod h1:FGqNzJBmxIsZURAxh2a8D21AnOVvvXZvGligs4npPUM= +github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/slack-go/slack v0.10.1 h1:BGbxa0kMsGEvLOEoZmYs8T1wWfoZXwmQFBb6FgYCXUA= +github.com/slack-go/slack v0.10.1/go.mod h1:wWL//kk0ho+FcQXcBTmEafUI5dz4qz5f4mMk8oIkioQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= @@ -330,40 +484,63 @@ github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5q github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/tebeka/strftime v0.1.3 h1:5HQXOqWKYRFfNyBMNVc9z5+QzuBtIXy03psIhtdJYto= github.com/tebeka/strftime v0.1.3/go.mod h1:7wJm3dZlpr4l/oVK0t1HYIc4rMzQ2XJlOMIUJUJH6XQ= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/ugorji/go v1.1.7 h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo= github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= github.com/ugorji/go v1.2.3 h1:WbFSXLxDFKVN69Sk8t+XHGzVCD7R8UoAATR8NqZgTbk= github.com/ugorji/go v1.2.3/go.mod h1:5l8GZ8hZvmL4uMdy+mhCO1LjswGRYco9Q3HfuisB21A= -github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs= github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= github.com/ugorji/go/codec v1.2.3 h1:/mVYEV+Jo3IZKeA5gBngN0AvNnQltEDkR+eQikkWQu0= github.com/ugorji/go/codec v1.2.3/go.mod h1:5FxzDJIgeiWJZslYHPj+LS1dq1ZBQVelZFnjsFGI/Uc= github.com/valyala/fastjson v1.5.1 h1:SXaQZVSwLjZOVhDEhjiCcDtnX0Feu7Z7A1+C5atpoHM= github.com/valyala/fastjson v1.5.1/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY= +github.com/wcharczuk/go-chart/v2 
v2.1.0/go.mod h1:yx7MvAVNcP/kN9lKXM/NTce4au4DFN99j6i1OwDclNA= +github.com/webview/webview v0.0.0-20210216142346-e0bfdf0e5d90 h1:G/O1RFjhc9hgVYjaPQ0Oceqxf3GwRQl/5XEAWYetjmg= +github.com/webview/webview v0.0.0-20210216142346-e0bfdf0e5d90/go.mod h1:rpXAuuHgyEJb6kXcXldlkOjU6y4x+YcASKKXJNUhh0Y= github.com/x-cray/logrus-prefixed-formatter v0.5.2 h1:00txxvfBM9muc0jiLIEAkAcIMJzfthRT6usrui8uGmg= github.com/x-cray/logrus-prefixed-formatter v0.5.2/go.mod h1:2duySbKsL6M18s5GU7VPsoEPHyzalCE06qoARUCeBBE= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= +github.com/zserge/lorca v0.1.9 h1:vbDdkqdp2/rmeg8GlyCewY2X8Z+b0s7BqWyIQL/gakc= +github.com/zserge/lorca v0.1.9/go.mod h1:bVmnIbIRlOcoV285KIRSe4bUABKi7R7384Ycuum6e4A= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opentelemetry.io/otel v0.14.0 h1:YFBEfjCk9MTjaytCNSUkp9Q8lF7QJezA06T71FbQxLQ= -go.opentelemetry.io/otel v0.14.0/go.mod h1:vH5xEuwy7Rts0GNtsCW3HYQoZDY+OmBJ6t1bFGGlxgw= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opentelemetry.io/otel v0.19.0 h1:Lenfy7QHRXPZVsw/12CWpxX6d/JkrX8wrx2vO8G80Ng= +go.opentelemetry.io/otel v0.19.0/go.mod h1:j9bF567N9EfomkSidSfmMwIwIBuP37AMAIzVW85OxSg= +go.opentelemetry.io/otel/metric v0.19.0 h1:dtZ1Ju44gkJkYvo+3qGqVXmf88tc+a42edOywypengg= +go.opentelemetry.io/otel/metric v0.19.0/go.mod h1:8f9fglJPRnXuskQmKpnad31lcLJ2VmNNqIsx/uIwBSc= +go.opentelemetry.io/otel/oteltest v0.19.0 h1:YVfA0ByROYqTwOxqHVZYZExzEpfZor+MU1rU+ip2v9Q= +go.opentelemetry.io/otel/oteltest v0.19.0/go.mod h1:tI4yxwh8U21v7JD6R3BcA/2+RBoTKFexE/PJ/nSO7IA= +go.opentelemetry.io/otel/trace v0.19.0 h1:1ucYlenXIDA1OlHVLDZKX0ObXV5RLaq06DtUKz5e5zc= +go.opentelemetry.io/otel/trace v0.19.0/go.mod h1:4IXiNextNOpPnRlI4ryK69mn5iC84bjBWZQA5DXz/qg= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= +go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.7.0 h1:zaiO/rmgFjbmCXdSYJWQcdvOCsthmdaHfr3Gm2Kx4Ec= +go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= golang.org/x/crypto 
v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -372,9 +549,11 @@ golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad h1:DN0cp81fZ3njFcrLCytUHRSUkqBjfTo4Tx9RJTWs0EY= -golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e h1:T8NU3HyQ8ClP4SEE+KbFlg6n0NhuTsN4MyznaarGsZM= +golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -382,11 +561,17 @@ golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136 h1:A1gGSx58LAGVHUUsOf7IiR0u8Xb6W51gRwfDBhkdcaw= golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6 h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/image v0.0.0-20200927104501-e162460cd6b5/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint 
v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -394,10 +579,18 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -412,18 +605,53 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net 
v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0 h1:wBouT66WTYFXdxfVdz9sVWARVd/2vfGcmI45D2gj45M= -golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220403103023-749bd193bc2b h1:vI32FkLJNAWtGD4BwkThwEy6XS7ZLLMHkSkYfF8M0W0= +golang.org/x/net v0.0.0-20220403103023-749bd193bc2b/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -432,35 +660,68 @@ golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210113181707-4bcb84eeeb78 h1:nVuTkr9L6Bq62qpUqKo/RnZCFfzDBL0bYo6w9OJUqZY= -golang.org/x/sys v0.0.0-20210113181707-4bcb84eeeb78/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c h1:aFV+BgZ4svzjfabn8ERpuB4JI4N6/rdy1iusx77G3oU= +golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 h1:Hir2P/De0WpUhtrKGGjvSb2YxUgyZ7EFOSLIcSSpiwE= golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -483,9 +744,40 @@ golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= 
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.9 h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= gonum.org/v1/gonum v0.8.1 h1:wGtP3yGpc5mCLOLeTeBdjeui9oZSz5De0eOjMLC/QuQ= gonum.org/v1/gonum v0.8.1/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= @@ -497,10 +789,23 @@ google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod 
h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -510,23 +815,59 @@ google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98 google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf h1:JTjwKJX9erVpsw17w+OIPP7iAgEkN/r8urhWSunEDTs= +google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0 h1:4MY060fB1DLGMB/7MBTLnwQUY6+F09GEiz6SsrNqyzM= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= 
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -534,29 +875,38 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogR gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= +gopkg.in/go-playground/validator.v9 v9.29.1/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/tucnak/telebot.v2 v2.3.5 h1:TdMJTlG8kvepsvZdy/gPeYEBdwKdwFFjH1AQTua9BOU= -gopkg.in/tucnak/telebot.v2 v2.3.5/go.mod h1:BgaIIx50PSRS9pG59JH+geT82cfvoJU/IaI5TJdN3v8= +gopkg.in/tucnak/telebot.v2 v2.5.0 h1:i+NynLo443Vp+Zn3Gv9JBjh3Z/PaiKAQwcnhNI7y6Po= +gopkg.in/tucnak/telebot.v2 v2.5.0/go.mod h1:BgaIIx50PSRS9pG59JH+geT82cfvoJU/IaI5TJdN3v8= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 
v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/linode/max-grid-usdttwd.sh b/linode/max-grid-usdttwd.sh new file mode 100644 index 0000000000..53a84eb7e4 --- /dev/null +++ b/linode/max-grid-usdttwd.sh @@ -0,0 +1,99 @@ +#!/bin/bash +# +# MAX_API_KEY= +# +# +# MAX_API_SECRET= +# +# +# LOWER_PRICE= +# +# +# UPPER_PRICE= +# +# +# GRID_NUMBER= +# +# +# QUANTITY= +# +# +# PROFIT_SPREAD= +# +# +# SIDE= +# +# +# CATCH_UP= +# +# +# LONG= +set -e +osf=$(uname | tr '[:upper:]' '[:lower:]') +version=v1.13.0 +dist_file=bbgo-$version-$osf-amd64.tar.gz + +apt-get install -y redis-server + +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-$osf bbgo +chmod +x bbgo +mv bbgo /usr/local/bin/bbgo + +useradd --create-home -g users -s /usr/bin/bash bbgo +cd /home/bbgo + +cat <<END > .env.local +export MAX_API_KEY=$MAX_API_KEY +export MAX_API_SECRET=$MAX_API_SECRET +END + +cat <<END > /etc/systemd/system/bbgo.service +[Unit] +Description=bbgo trading bot +After=network.target + +[Install] +WantedBy=multi-user.target + +[Service] +WorkingDirectory=/home/bbgo +# EnvironmentFile=/home/bbgo/envvars +ExecStart=/usr/local/bin/bbgo run --enable-web-server +KillMode=process +User=bbgo +Restart=always +RestartSec=10 +END + +cat <<END > bbgo.yaml +--- +persistence: + json: + directory: var/data + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +exchangeStrategies: +- on: max + grid: + symbol: USDTTWD + quantity: $QUANTITY + gridNumber: $GRID_NUMBER + profitSpread: $PROFIT_SPREAD + upperPrice: $UPPER_PRICE + lowerPrice: $LOWER_PRICE + side: $SIDE + long: $LONG + catchUp: $CATCH_UP + persistence: + type: redis + store: main +END + +systemctl enable bbgo.service +systemctl daemon-reload +systemctl start bbgo diff --git a/migrations/20200721225616_trades.sql b/migrations/20200721225616_trades.sql deleted file mode 100644 index 12d4e4c771..0000000000 --- a/migrations/20200721225616_trades.sql +++ /dev/null @@ -1,23 +0,0 @@ --- +up -CREATE TABLE `trades` -( - `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, - - `id` BIGINT UNSIGNED, - `exchange` VARCHAR(24) NOT NULL DEFAULT '', - `symbol` VARCHAR(8) NOT NULL, - `price` DECIMAL(16, 8) UNSIGNED NOT NULL, - `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, - `quote_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, - `fee` DECIMAL(16, 8) UNSIGNED NOT 
NULL, - `fee_currency` VARCHAR(4) NOT NULL, - `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE, - `is_maker` BOOLEAN NOT NULL DEFAULT FALSE, - `side` VARCHAR(4) NOT NULL DEFAULT '', - `traded_at` DATETIME(3) NOT NULL, - - PRIMARY KEY (`gid`), - UNIQUE KEY `id` (`id`) -); --- +down -DROP TABLE `trades`; diff --git a/migrations/20201103173342_trades_add_order_id.sql b/migrations/20201103173342_trades_add_order_id.sql deleted file mode 100644 index 6af8910fdc..0000000000 --- a/migrations/20201103173342_trades_add_order_id.sql +++ /dev/null @@ -1,7 +0,0 @@ --- +up -ALTER TABLE `trades` - ADD COLUMN `order_id` BIGINT UNSIGNED NOT NULL; - --- +down -ALTER TABLE `trades` - DROP COLUMN `order_id`; diff --git a/migrations/20201211175751_fix_symbol_length.sql b/migrations/20201211175751_fix_symbol_length.sql deleted file mode 100644 index e0965ee422..0000000000 --- a/migrations/20201211175751_fix_symbol_length.sql +++ /dev/null @@ -1,7 +0,0 @@ --- +up -ALTER TABLE trades MODIFY COLUMN symbol VARCHAR(9); -ALTER TABLE orders MODIFY COLUMN symbol VARCHAR(9); - --- +down -ALTER TABLE trades MODIFY COLUMN symbol VARCHAR(8); -ALTER TABLE orders MODIFY COLUMN symbol VARCHAR(8); diff --git a/migrations/20210118163847_fix_unique_index.sql b/migrations/20210118163847_fix_unique_index.sql deleted file mode 100644 index d61819f078..0000000000 --- a/migrations/20210118163847_fix_unique_index.sql +++ /dev/null @@ -1,15 +0,0 @@ --- +up --- +begin -ALTER TABLE `trades` DROP INDEX `id`; --- +end --- +begin -ALTER TABLE `trades` ADD UNIQUE INDEX `id` (`exchange`,`symbol`, `side`, `id`); --- +end - --- +down --- +begin -ALTER TABLE `trades` DROP INDEX `id`; --- +end --- +begin -ALTER TABLE `trades` ADD UNIQUE INDEX `id` (`id`); --- +end diff --git a/migrations/20210119232826_add_margin_columns.sql b/migrations/20210119232826_add_margin_columns.sql deleted file mode 100644 index e4da7f59e3..0000000000 --- a/migrations/20210119232826_add_margin_columns.sql +++ /dev/null @@ -1,19 +0,0 @@ --- +up -ALTER TABLE `trades` - ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE, - ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE - ; - -ALTER TABLE `orders` - ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE, - ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE - ; - --- +down -ALTER TABLE `trades` - DROP COLUMN `is_margin`, - DROP COLUMN `is_isolated`; - -ALTER TABLE `orders` - DROP COLUMN `is_margin`, - DROP COLUMN `is_isolated`; diff --git a/migrations/mysql/20200721225616_trades.sql b/migrations/mysql/20200721225616_trades.sql new file mode 100644 index 0000000000..98ba49b3ef --- /dev/null +++ b/migrations/mysql/20200721225616_trades.sql @@ -0,0 +1,41 @@ +-- +up +CREATE TABLE `trades` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + + `id` BIGINT UNSIGNED, + `order_id` BIGINT UNSIGNED NOT NULL, + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + `symbol` VARCHAR(20) NOT NULL, + `price` DECIMAL(16, 8) UNSIGNED NOT NULL, + `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, + `quote_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, + `fee` DECIMAL(16, 8) UNSIGNED NOT NULL, + `fee_currency` VARCHAR(10) NOT NULL, + `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE, + `is_maker` BOOLEAN NOT NULL DEFAULT FALSE, + `side` VARCHAR(4) NOT NULL DEFAULT '', + `traded_at` DATETIME(3) NOT NULL, + + `is_margin` BOOLEAN NOT NULL DEFAULT FALSE, + `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE, + + `strategy` VARCHAR(32) NULL, + `pnl` DECIMAL NULL, + + PRIMARY KEY (`gid`), + UNIQUE KEY `id` (`exchange`, `symbol`, `side`, `id`) +); + +CREATE INDEX 
trades_symbol ON trades (exchange, symbol); +CREATE INDEX trades_symbol_fee_currency ON trades (exchange, symbol, fee_currency, traded_at); +CREATE INDEX trades_traded_at_symbol ON trades (exchange, traded_at, symbol); + + +-- +down +DROP TABLE IF EXISTS `trades`; + +DROP INDEX trades_symbol ON trades; +DROP INDEX trades_symbol_fee_currency ON trades; +DROP INDEX trades_traded_at_symbol ON trades; + diff --git a/migrations/mysql/20200819054742_trade_index.sql b/migrations/mysql/20200819054742_trade_index.sql new file mode 100644 index 0000000000..06569c6673 --- /dev/null +++ b/migrations/mysql/20200819054742_trade_index.sql @@ -0,0 +1,5 @@ +-- +up +SELECT 1; + +-- +down +SELECT 1; diff --git a/migrations/20201102222546_orders.sql b/migrations/mysql/20201102222546_orders.sql similarity index 69% rename from migrations/20201102222546_orders.sql rename to migrations/mysql/20201102222546_orders.sql index ec5eb0e114..32a85e7084 100644 --- a/migrations/20201102222546_orders.sql +++ b/migrations/mysql/20201102222546_orders.sql @@ -6,9 +6,9 @@ CREATE TABLE `orders` `exchange` VARCHAR(24) NOT NULL DEFAULT '', -- order_id is the order id returned from the exchange `order_id` BIGINT UNSIGNED NOT NULL, - `client_order_id` VARCHAR(42) NOT NULL DEFAULT '', + `client_order_id` VARCHAR(122) NOT NULL DEFAULT '', `order_type` VARCHAR(16) NOT NULL, - `symbol` VARCHAR(8) NOT NULL, + `symbol` VARCHAR(20) NOT NULL, `status` VARCHAR(12) NOT NULL, `time_in_force` VARCHAR(4) NOT NULL, `price` DECIMAL(16, 8) UNSIGNED NOT NULL, @@ -19,8 +19,16 @@ CREATE TABLE `orders` `is_working` BOOL NOT NULL DEFAULT FALSE, `created_at` DATETIME(3) NOT NULL, `updated_at` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3), - PRIMARY KEY (`gid`) + `is_margin` BOOLEAN NOT NULL DEFAULT FALSE, + `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE, + + PRIMARY KEY (`gid`) ); +CREATE INDEX orders_symbol ON orders (exchange, symbol); +CREATE UNIQUE INDEX orders_order_id ON orders (order_id, exchange); + -- +down +DROP INDEX orders_symbol ON orders; +DROP INDEX orders_order_id ON orders; DROP TABLE `orders`; diff --git a/migrations/mysql/20201103173342_trades_add_order_id.sql b/migrations/mysql/20201103173342_trades_add_order_id.sql new file mode 100644 index 0000000000..06569c6673 --- /dev/null +++ b/migrations/mysql/20201103173342_trades_add_order_id.sql @@ -0,0 +1,5 @@ +-- +up +SELECT 1; + +-- +down +SELECT 1; diff --git a/migrations/mysql/20201105092857_trades_index_fix.sql b/migrations/mysql/20201105092857_trades_index_fix.sql new file mode 100644 index 0000000000..856227a2f4 --- /dev/null +++ b/migrations/mysql/20201105092857_trades_index_fix.sql @@ -0,0 +1,8 @@ +-- +up +SELECT 1; + +-- +down +SELECT 1; + + + diff --git a/migrations/mysql/20201105093056_orders_add_index.sql b/migrations/mysql/20201105093056_orders_add_index.sql new file mode 100644 index 0000000000..06569c6673 --- /dev/null +++ b/migrations/mysql/20201105093056_orders_add_index.sql @@ -0,0 +1,5 @@ +-- +up +SELECT 1; + +-- +down +SELECT 1; diff --git a/migrations/20201106114742_klines.sql b/migrations/mysql/20201106114742_klines.sql similarity index 73% rename from migrations/20201106114742_klines.sql rename to migrations/mysql/20201106114742_klines.sql index ab1cfd7499..755f98f814 100644 --- a/migrations/20201106114742_klines.sql +++ b/migrations/mysql/20201106114742_klines.sql @@ -6,12 +6,12 @@ CREATE TABLE `klines` `start_time` DATETIME(3) NOT NULL, `end_time` DATETIME(3) NOT NULL, `interval` VARCHAR(3) NOT NULL, - `symbol` VARCHAR(7) NOT 
NULL, - `open` DECIMAL(16, 8) UNSIGNED NOT NULL, - `high` DECIMAL(16, 8) UNSIGNED NOT NULL, - `low` DECIMAL(16, 8) UNSIGNED NOT NULL, - `close` DECIMAL(16, 8) UNSIGNED NOT NULL DEFAULT 0.0, - `volume` DECIMAL(16, 8) UNSIGNED NOT NULL DEFAULT 0.0, + `symbol` VARCHAR(20) NOT NULL, + `open` DECIMAL(20, 8) UNSIGNED NOT NULL, + `high` DECIMAL(20, 8) UNSIGNED NOT NULL, + `low` DECIMAL(20, 8) UNSIGNED NOT NULL, + `close` DECIMAL(20, 8) UNSIGNED NOT NULL DEFAULT 0.0, + `volume` DECIMAL(20, 8) UNSIGNED NOT NULL DEFAULT 0.0, `closed` BOOL NOT NULL DEFAULT TRUE, `last_trade_id` INT UNSIGNED NOT NULL DEFAULT 0, `num_trades` INT UNSIGNED NOT NULL DEFAULT 0, diff --git a/migrations/mysql/20201211175751_fix_symbol_length.sql b/migrations/mysql/20201211175751_fix_symbol_length.sql new file mode 100644 index 0000000000..06569c6673 --- /dev/null +++ b/migrations/mysql/20201211175751_fix_symbol_length.sql @@ -0,0 +1,5 @@ +-- +up +SELECT 1; + +-- +down +SELECT 1; diff --git a/migrations/mysql/20210118163847_fix_unique_index.sql b/migrations/mysql/20210118163847_fix_unique_index.sql new file mode 100644 index 0000000000..8a69dd96a4 --- /dev/null +++ b/migrations/mysql/20210118163847_fix_unique_index.sql @@ -0,0 +1,9 @@ +-- +up +-- +begin +SELECT 1; +-- +end + +-- +down +-- +begin +SELECT 1; +-- +end diff --git a/migrations/mysql/20210119232826_add_margin_columns.sql b/migrations/mysql/20210119232826_add_margin_columns.sql new file mode 100644 index 0000000000..f4e29ff622 --- /dev/null +++ b/migrations/mysql/20210119232826_add_margin_columns.sql @@ -0,0 +1,4 @@ +-- +up +SELECT 1; +-- +down +SELECT 1; diff --git a/migrations/mysql/20210129182704_trade_price_quantity_index.sql b/migrations/mysql/20210129182704_trade_price_quantity_index.sql new file mode 100644 index 0000000000..6e771ad32f --- /dev/null +++ b/migrations/mysql/20210129182704_trade_price_quantity_index.sql @@ -0,0 +1,10 @@ +-- +up +-- +begin +CREATE INDEX trades_price_quantity ON trades (order_id,price,quantity); +-- +end + +-- +down + +-- +begin +DROP INDEX trades_price_quantity ON trades +-- +end diff --git a/migrations/mysql/20210215203116_add_pnl_column.sql b/migrations/mysql/20210215203116_add_pnl_column.sql new file mode 100644 index 0000000000..8a69dd96a4 --- /dev/null +++ b/migrations/mysql/20210215203116_add_pnl_column.sql @@ -0,0 +1,9 @@ +-- +up +-- +begin +SELECT 1; +-- +end + +-- +down +-- +begin +SELECT 1; +-- +end diff --git a/migrations/mysql/20210223080622_add_rewards_table.sql b/migrations/mysql/20210223080622_add_rewards_table.sql new file mode 100644 index 0000000000..ca7c403860 --- /dev/null +++ b/migrations/mysql/20210223080622_add_rewards_table.sql @@ -0,0 +1,32 @@ +-- +up +CREATE TABLE `rewards` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + + -- for exchange + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + -- reward record id + `uuid` VARCHAR(32) NOT NULL, + `reward_type` VARCHAR(24) NOT NULL DEFAULT '', + + -- currency symbol, BTC, MAX, USDT ... 
etc + `currency` VARCHAR(5) NOT NULL, + + -- the quantity of the rewards + `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, + + `state` VARCHAR(5) NOT NULL, + + `created_at` DATETIME NOT NULL, + + `spent` BOOLEAN NOT NULL DEFAULT FALSE, + + `note` TEXT NULL, + + PRIMARY KEY (`gid`), + UNIQUE KEY `uuid` (`exchange`, `uuid`) +); + +-- +down +DROP TABLE IF EXISTS `rewards`; diff --git a/migrations/mysql/20210301140656_add_withdraws_table.sql b/migrations/mysql/20210301140656_add_withdraws_table.sql new file mode 100644 index 0000000000..73b1efea33 --- /dev/null +++ b/migrations/mysql/20210301140656_add_withdraws_table.sql @@ -0,0 +1,28 @@ +-- +up +-- +begin +CREATE TABLE `withdraws` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + -- asset is the asset name (currency) + `asset` VARCHAR(10) NOT NULL, + + `address` VARCHAR(128) NOT NULL, + `network` VARCHAR(32) NOT NULL DEFAULT '', + + `amount` DECIMAL(16, 8) NOT NULL, + `txn_id` VARCHAR(256) NOT NULL, + `txn_fee` DECIMAL(16, 8) NOT NULL DEFAULT 0, + `txn_fee_currency` VARCHAR(32) NOT NULL DEFAULT '', + `time` DATETIME(3) NOT NULL, + + PRIMARY KEY (`gid`), + UNIQUE KEY `txn_id` (`exchange`, `txn_id`) +); +-- +end + +-- +down +-- +begin +DROP TABLE IF EXISTS `withdraws`; +-- +end diff --git a/migrations/mysql/20210307201830_add_deposits_table.sql b/migrations/mysql/20210307201830_add_deposits_table.sql new file mode 100644 index 0000000000..dba549e524 --- /dev/null +++ b/migrations/mysql/20210307201830_add_deposits_table.sql @@ -0,0 +1,26 @@ +-- +up +-- +begin +CREATE TABLE `deposits` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + `exchange` VARCHAR(24) NOT NULL, + + -- asset is the asset name (currency) + `asset` VARCHAR(10) NOT NULL, + + `address` VARCHAR(128) NOT NULL DEFAULT '', + `amount` DECIMAL(16, 8) NOT NULL, + `txn_id` VARCHAR(256) NOT NULL, + `time` DATETIME(3) NOT NULL, + + PRIMARY KEY (`gid`), + UNIQUE KEY `txn_id` (`exchange`, `txn_id`) +); +-- +end + + +-- +down + +-- +begin +DROP TABLE IF EXISTS `deposits`; +-- +end diff --git a/migrations/mysql/20210416230730_klines_symbol_length.sql b/migrations/mysql/20210416230730_klines_symbol_length.sql new file mode 100644 index 0000000000..b7df1262f5 --- /dev/null +++ b/migrations/mysql/20210416230730_klines_symbol_length.sql @@ -0,0 +1,25 @@ +-- +up +ALTER TABLE `klines` +MODIFY COLUMN `symbol` VARCHAR(10) NOT NULL; + +ALTER TABLE `okex_klines` +MODIFY COLUMN `symbol` VARCHAR(10) NOT NULL; + +ALTER TABLE `binance_klines` +MODIFY COLUMN `symbol` VARCHAR(10) NOT NULL; + +ALTER TABLE `max_klines` +MODIFY COLUMN `symbol` VARCHAR(10) NOT NULL; + +-- +down +ALTER TABLE `klines` +MODIFY COLUMN `symbol` VARCHAR(7) NOT NULL; + +ALTER TABLE `okex_klines` +MODIFY COLUMN `symbol` VARCHAR(7) NOT NULL; + +ALTER TABLE `binance_klines` +MODIFY COLUMN `symbol` VARCHAR(7) NOT NULL; + +ALTER TABLE `max_klines` +MODIFY COLUMN `symbol` VARCHAR(7) NOT NULL; diff --git a/migrations/mysql/20210421091430_increase_symbol_length.sql b/migrations/mysql/20210421091430_increase_symbol_length.sql new file mode 100644 index 0000000000..6c1251aec1 --- /dev/null +++ b/migrations/mysql/20210421091430_increase_symbol_length.sql @@ -0,0 +1,25 @@ +-- +up +ALTER TABLE `klines` +MODIFY COLUMN `symbol` VARCHAR(12) NOT NULL; + +ALTER TABLE `okex_klines` +MODIFY COLUMN `symbol` VARCHAR(12) NOT NULL; + +ALTER TABLE `binance_klines` +MODIFY COLUMN `symbol` VARCHAR(12) NOT NULL; + +ALTER TABLE `max_klines` +MODIFY COLUMN `symbol` VARCHAR(12) NOT NULL; + +-- +down +ALTER TABLE 
`klines` +MODIFY COLUMN `symbol` VARCHAR(10) NOT NULL; + +ALTER TABLE `okex_klines` +MODIFY COLUMN `symbol` VARCHAR(10) NOT NULL; + +ALTER TABLE `binance_klines` +MODIFY COLUMN `symbol` VARCHAR(10) NOT NULL; + +ALTER TABLE `max_klines` +MODIFY COLUMN `symbol` VARCHAR(10) NOT NULL; diff --git a/migrations/mysql/20210421095030_increase_decimal_length.sql b/migrations/mysql/20210421095030_increase_decimal_length.sql new file mode 100644 index 0000000000..0b0e4454bb --- /dev/null +++ b/migrations/mysql/20210421095030_increase_decimal_length.sql @@ -0,0 +1,25 @@ +-- +up +ALTER TABLE `klines` +MODIFY COLUMN `volume` decimal(20,8) unsigned NOT NULL DEFAULT '0.00000000'; + +ALTER TABLE `okex_klines` +MODIFY COLUMN `volume` decimal(20,8) unsigned NOT NULL DEFAULT '0.00000000'; + +ALTER TABLE `binance_klines` +MODIFY COLUMN `volume` decimal(20,8) unsigned NOT NULL DEFAULT '0.00000000'; + +ALTER TABLE `max_klines` +MODIFY COLUMN `volume` decimal(20,8) unsigned NOT NULL DEFAULT '0.00000000'; + +-- +down +ALTER TABLE `klines` +MODIFY COLUMN `volume` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000'; + +ALTER TABLE `okex_klines` +MODIFY COLUMN `volume` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000'; + +ALTER TABLE `binance_klines` +MODIFY COLUMN `volume` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000'; + +ALTER TABLE `max_klines` +MODIFY COLUMN `volume` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000'; diff --git a/migrations/mysql/20210531234123_add_kline_taker_buy_columns.sql b/migrations/mysql/20210531234123_add_kline_taker_buy_columns.sql new file mode 100644 index 0000000000..ce397f8e03 --- /dev/null +++ b/migrations/mysql/20210531234123_add_kline_taker_buy_columns.sql @@ -0,0 +1,47 @@ +-- +up +-- +begin +ALTER TABLE `binance_klines` + ADD COLUMN `quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0, + ADD COLUMN `taker_buy_base_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0, + ADD COLUMN `taker_buy_quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0; +-- +end +-- +begin +ALTER TABLE `max_klines` + ADD COLUMN `quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0, + ADD COLUMN `taker_buy_base_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0, + ADD COLUMN `taker_buy_quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0; +-- +end +-- +begin +ALTER TABLE `okex_klines` + ADD COLUMN `quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0, + ADD COLUMN `taker_buy_base_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0, + ADD COLUMN `taker_buy_quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0; +-- +end +-- +begin +ALTER TABLE `klines` + ADD COLUMN `quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0, + ADD COLUMN `taker_buy_base_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0, + ADD COLUMN `taker_buy_quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0; +-- +end + +-- +down +-- +begin +ALTER TABLE `binance_klines` + DROP COLUMN `quote_volume`, + DROP COLUMN `taker_buy_base_volume`, + DROP COLUMN `taker_buy_quote_volume`; +-- +end + +-- +begin +ALTER TABLE `max_klines` + DROP COLUMN `quote_volume`, + DROP COLUMN `taker_buy_base_volume`, + DROP COLUMN `taker_buy_quote_volume`; +-- +end + +-- +begin +ALTER TABLE `okex_klines` + DROP COLUMN `quote_volume`, + DROP COLUMN `taker_buy_base_volume`, + DROP COLUMN `taker_buy_quote_volume`; +-- +end diff --git a/migrations/mysql/20211205162043_add_is_futures_column.sql b/migrations/mysql/20211205162043_add_is_futures_column.sql new file mode 100644 index 0000000000..b2929edd23 --- /dev/null +++ b/migrations/mysql/20211205162043_add_is_futures_column.sql @@ -0,0 +1,18 @@ +-- +up +-- +begin 
+ALTER TABLE `trades` ADD COLUMN `is_futures` BOOLEAN NOT NULL DEFAULT FALSE; +-- +end + +-- +begin +ALTER TABLE `orders` ADD COLUMN `is_futures` BOOLEAN NOT NULL DEFAULT FALSE; +-- +end + +-- +down + +-- +begin +ALTER TABLE `trades` DROP COLUMN `is_futures`; +-- +end + +-- +begin +ALTER TABLE `orders` DROP COLUMN `is_futures`; +-- +end diff --git a/migrations/mysql/20211211020303_add_ftx_kline.sql b/migrations/mysql/20211211020303_add_ftx_kline.sql new file mode 100644 index 0000000000..3ba384f564 --- /dev/null +++ b/migrations/mysql/20211211020303_add_ftx_kline.sql @@ -0,0 +1,33 @@ +-- +up +-- +begin +create table if not exists ftx_klines +( + gid bigint unsigned auto_increment + primary key, + exchange varchar(10) not null, + start_time datetime(3) not null, + end_time datetime(3) not null, + `interval` varchar(3) not null, + symbol varchar(20) not null, + open decimal(20,8) unsigned not null, + high decimal(20,8) unsigned not null, + low decimal(20,8) unsigned not null, + close decimal(20,8) unsigned default 0.00000000 not null, + volume decimal(20,8) unsigned default 0.00000000 not null, + closed tinyint(1) default 1 not null, + last_trade_id int unsigned default '0' not null, + num_trades int unsigned default '0' not null, + quote_volume decimal(32,4) default 0.0000 not null, + taker_buy_base_volume decimal(32,8) not null, + taker_buy_quote_volume decimal(32,4) default 0.0000 not null + ); +-- +end +-- +begin +create index klines_end_time_symbol_interval + on ftx_klines (end_time, symbol, `interval`); +-- +end +-- +down + +-- +begin +drop table ftx_klines; +-- +end diff --git a/migrations/mysql/20211211034819_add_nav_history_details.sql b/migrations/mysql/20211211034819_add_nav_history_details.sql new file mode 100644 index 0000000000..965fe800fc --- /dev/null +++ b/migrations/mysql/20211211034819_add_nav_history_details.sql @@ -0,0 +1,26 @@ +-- +up +-- +begin +CREATE TABLE nav_history_details +( + gid bigint unsigned auto_increment PRIMARY KEY, + exchange VARCHAR(30) NOT NULL, + subaccount VARCHAR(30) NOT NULL, + time DATETIME(3) NOT NULL, + currency VARCHAR(12) NOT NULL, + balance_in_usd DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL, + balance_in_btc DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL, + balance DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL, + available DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL, + locked DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL +); +-- +end +-- +begin +CREATE INDEX idx_nav_history_details + on nav_history_details (time, currency, exchange); +-- +end + +-- +down + +-- +begin +DROP TABLE nav_history_details; +-- +end diff --git a/migrations/mysql/20211211103657_update_fee_currency_length.sql b/migrations/mysql/20211211103657_update_fee_currency_length.sql new file mode 100644 index 0000000000..8a69dd96a4 --- /dev/null +++ b/migrations/mysql/20211211103657_update_fee_currency_length.sql @@ -0,0 +1,9 @@ +-- +up +-- +begin +SELECT 1; +-- +end + +-- +down +-- +begin +SELECT 1; +-- +end diff --git a/migrations/mysql/20211226022411_add_kucoin_klines.sql b/migrations/mysql/20211226022411_add_kucoin_klines.sql new file mode 100644 index 0000000000..4d65d58ef6 --- /dev/null +++ b/migrations/mysql/20211226022411_add_kucoin_klines.sql @@ -0,0 +1,10 @@ +-- +up +-- +begin +CREATE TABLE `kucoin_klines` LIKE `binance_klines`; +-- +end + +-- +down + +-- +begin +DROP TABLE `kucoin_klines`; +-- +end diff --git a/migrations/mysql/20220304153317_add_profit_table.sql b/migrations/mysql/20220304153317_add_profit_table.sql new file mode 
100644 index 0000000000..96037a09cd --- /dev/null +++ b/migrations/mysql/20220304153317_add_profit_table.sql @@ -0,0 +1,71 @@ +-- +up +CREATE TABLE `profits` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + + `strategy` VARCHAR(32) NOT NULL, + `strategy_instance_id` VARCHAR(64) NOT NULL, + + `symbol` VARCHAR(8) NOT NULL, + + -- average_cost is the position average cost + `average_cost` DECIMAL(16, 8) UNSIGNED NOT NULL, + + -- profit is the pnl (profit and loss) + `profit` DECIMAL(16, 8) NOT NULL, + + -- net_profit is the pnl (profit and loss) + `net_profit` DECIMAL(16, 8) NOT NULL, + + -- profit_margin is the pnl (profit and loss) + `profit_margin` DECIMAL(16, 8) NOT NULL, + + -- net_profit_margin is the pnl (profit and loss) + `net_profit_margin` DECIMAL(16, 8) NOT NULL, + + `quote_currency` VARCHAR(10) NOT NULL, + + `base_currency` VARCHAR(10) NOT NULL, + + -- ------------------------------------------------------- + -- embedded trade data -- + -- ------------------------------------------------------- + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `is_futures` BOOLEAN NOT NULL DEFAULT FALSE, + + `is_margin` BOOLEAN NOT NULL DEFAULT FALSE, + + `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE, + + `trade_id` BIGINT UNSIGNED NOT NULL, + + -- side is the side of the trade that makes profit + `side` VARCHAR(4) NOT NULL DEFAULT '', + + `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE, + + `is_maker` BOOLEAN NOT NULL DEFAULT FALSE, + + -- price is the price of the trade that makes profit + `price` DECIMAL(16, 8) UNSIGNED NOT NULL, + + -- quantity is the quantity of the trade that makes profit + `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, + + -- quote_quantity is the quote quantity of the trade that makes profit + `quote_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, + + `traded_at` DATETIME(3) NOT NULL, + + -- fee + `fee_in_usd` DECIMAL(16, 8), + `fee` DECIMAL(16, 8) NOT NULL, + `fee_currency` VARCHAR(10) NOT NULL, + + PRIMARY KEY (`gid`), + UNIQUE KEY `trade_id` (`trade_id`) +); + +-- +down +DROP TABLE IF EXISTS `profits`; diff --git a/migrations/mysql/20220307132917_add_positions.sql b/migrations/mysql/20220307132917_add_positions.sql new file mode 100644 index 0000000000..ca10e14d91 --- /dev/null +++ b/migrations/mysql/20220307132917_add_positions.sql @@ -0,0 +1,30 @@ +-- +up +CREATE TABLE `positions` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + + `strategy` VARCHAR(32) NOT NULL, + `strategy_instance_id` VARCHAR(64) NOT NULL, + + `symbol` VARCHAR(20) NOT NULL, + `quote_currency` VARCHAR(10) NOT NULL, + `base_currency` VARCHAR(10) NOT NULL, + + -- average_cost is the position average cost + `average_cost` DECIMAL(16, 8) UNSIGNED NOT NULL, + `base` DECIMAL(16, 8) NOT NULL, + `quote` DECIMAL(16, 8) NOT NULL, + `profit` DECIMAL(16, 8) NULL, + + -- trade related columns + `trade_id` BIGINT UNSIGNED NOT NULL, -- the trade id in the exchange + `side` VARCHAR(4) NOT NULL, -- side of the trade + `exchange` VARCHAR(12) NOT NULL, -- exchange of the trade + `traded_at` DATETIME(3) NOT NULL, -- millisecond timestamp + + PRIMARY KEY (`gid`), + UNIQUE KEY `trade_id` (`trade_id`, `side`, `exchange`) +); + +-- +down +DROP TABLE IF EXISTS `positions`; diff --git a/migrations/mysql/20220317125555_fix_trade_indexes.sql b/migrations/mysql/20220317125555_fix_trade_indexes.sql new file mode 100644 index 0000000000..a52f1b3fa8 --- /dev/null +++ b/migrations/mysql/20220317125555_fix_trade_indexes.sql @@ -0,0 +1,19 @@ +-- +up +DROP INDEX trades_symbol ON trades; +DROP INDEX trades_symbol_fee_currency ON trades; 
+DROP INDEX trades_traded_at_symbol ON trades; + +-- this index is used for general trade query +CREATE INDEX trades_traded_at ON trades (traded_at, symbol, exchange, id, fee_currency, fee); +-- this index is used for join clause by trade_id +CREATE INDEX trades_id_traded_at ON trades (id, traded_at); +-- this index is used for join clause by order id +CREATE INDEX trades_order_id_traded_at ON trades (order_id, traded_at); + +-- +down +DROP INDEX trades_traded_at ON trades; +DROP INDEX trades_id_traded_at ON trades; +DROP INDEX trades_order_id_traded_at ON trades; +CREATE INDEX trades_symbol ON trades (exchange, symbol); +CREATE INDEX trades_symbol_fee_currency ON trades (exchange, symbol, fee_currency, traded_at); +CREATE INDEX trades_traded_at_symbol ON trades (exchange, traded_at, symbol); diff --git a/migrations/mysql/20220419121046_fix_fee_column.sql b/migrations/mysql/20220419121046_fix_fee_column.sql new file mode 100644 index 0000000000..c1bc5a62ed --- /dev/null +++ b/migrations/mysql/20220419121046_fix_fee_column.sql @@ -0,0 +1,21 @@ +-- +up +-- +begin +ALTER TABLE trades + CHANGE fee fee DECIMAL(16, 8) NOT NULL; +-- +end + +-- +begin +ALTER TABLE profits + CHANGE fee fee DECIMAL(16, 8) NOT NULL; +-- +end + +-- +begin +ALTER TABLE profits + CHANGE fee_in_usd fee_in_usd DECIMAL(16, 8); +-- +end + +-- +down + +-- +begin +SELECT 1; +-- +end diff --git a/migrations/mysql/20220503144849_add_margin_info_to_nav.sql b/migrations/mysql/20220503144849_add_margin_info_to_nav.sql new file mode 100644 index 0000000000..618b3435fe --- /dev/null +++ b/migrations/mysql/20220503144849_add_margin_info_to_nav.sql @@ -0,0 +1,27 @@ +-- +up +-- +begin +ALTER TABLE `nav_history_details` + ADD COLUMN `session` VARCHAR(30) NOT NULL, + ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE, + ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE, + ADD COLUMN `isolated_symbol` VARCHAR(30) NOT NULL DEFAULT '', + ADD COLUMN `net_asset` DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL, + ADD COLUMN `borrowed` DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL, + ADD COLUMN `price_in_usd` DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL +; +-- +end + + +-- +down + +-- +begin +ALTER TABLE `nav_history_details` + DROP COLUMN `session`, + DROP COLUMN `net_asset`, + DROP COLUMN `borrowed`, + DROP COLUMN `price_in_usd`, + DROP COLUMN `is_margin`, + DROP COLUMN `is_isolated`, + DROP COLUMN `isolated_symbol` +; +-- +end diff --git a/migrations/mysql/20220504184155_fix_net_asset_column.sql b/migrations/mysql/20220504184155_fix_net_asset_column.sql new file mode 100644 index 0000000000..2d5a9d271b --- /dev/null +++ b/migrations/mysql/20220504184155_fix_net_asset_column.sql @@ -0,0 +1,19 @@ +-- +up +-- +begin +ALTER TABLE `nav_history_details` + MODIFY COLUMN `net_asset` DECIMAL(32, 8) DEFAULT 0.00000000 NOT NULL, + CHANGE COLUMN `balance_in_usd` `net_asset_in_usd` DECIMAL(32, 2) DEFAULT 0.00000000 NOT NULL, + CHANGE COLUMN `balance_in_btc` `net_asset_in_btc` DECIMAL(32, 20) DEFAULT 0.00000000 NOT NULL; +-- +end + +-- +begin +ALTER TABLE `nav_history_details` + ADD COLUMN `interest` DECIMAL(32, 20) UNSIGNED DEFAULT 0.00000000 NOT NULL; +-- +end + +-- +down + +-- +begin +ALTER TABLE `nav_history_details` + DROP COLUMN `interest`; +-- +end diff --git a/migrations/mysql/20220512170322_fix_profit_symbol_length.sql b/migrations/mysql/20220512170322_fix_profit_symbol_length.sql new file mode 100644 index 0000000000..60bc96be96 --- /dev/null +++ b/migrations/mysql/20220512170322_fix_profit_symbol_length.sql @@ -0,0 
+1,11 @@ +-- +up +-- +begin +ALTER TABLE profits + CHANGE symbol symbol VARCHAR(20) NOT NULL; +-- +end + +-- +down + +-- +begin +SELECT 1; +-- +end diff --git a/migrations/mysql/20220520140707_kline_unique_idx.sql b/migrations/mysql/20220520140707_kline_unique_idx.sql new file mode 100644 index 0000000000..e45bde5f4b --- /dev/null +++ b/migrations/mysql/20220520140707_kline_unique_idx.sql @@ -0,0 +1,47 @@ +-- +up +-- +begin +CREATE UNIQUE INDEX idx_kline_binance_unique + ON binance_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +begin +CREATE UNIQUE INDEX idx_kline_max_unique + ON max_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +begin +CREATE UNIQUE INDEX `idx_kline_ftx_unique` + ON ftx_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +begin +CREATE UNIQUE INDEX `idx_kline_kucoin_unique` + ON kucoin_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +begin +CREATE UNIQUE INDEX `idx_kline_okex_unique` + ON okex_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +down + +-- +begin +DROP INDEX `idx_kline_ftx_unique` ON `ftx_klines`; +-- +end + +-- +begin +DROP INDEX `idx_kline_max_unique` ON `max_klines`; +-- +end + +-- +begin +DROP INDEX `idx_kline_binance_unique` ON `binance_klines`; +-- +end + +-- +begin +DROP INDEX `idx_kline_kucoin_unique` ON `kucoin_klines`; +-- +end + +-- +begin +DROP INDEX `idx_kline_okex_unique` ON `okex_klines`; +-- +end diff --git a/migrations/mysql/20220531012226_margin_loans.sql b/migrations/mysql/20220531012226_margin_loans.sql new file mode 100644 index 0000000000..dbbd1346cc --- /dev/null +++ b/migrations/mysql/20220531012226_margin_loans.sql @@ -0,0 +1,24 @@ +-- +up +CREATE TABLE `margin_loans` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + + `transaction_id` BIGINT UNSIGNED NOT NULL, + + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `asset` VARCHAR(24) NOT NULL DEFAULT '', + + `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '', + + -- quantity is the quantity of the trade that makes profit + `principle` DECIMAL(16, 8) UNSIGNED NOT NULL, + + `time` DATETIME(3) NOT NULL, + + PRIMARY KEY (`gid`), + UNIQUE KEY (`transaction_id`) +); + +-- +down +DROP TABLE IF EXISTS `margin_loans`; diff --git a/migrations/mysql/20220531013327_margin_repays.sql b/migrations/mysql/20220531013327_margin_repays.sql new file mode 100644 index 0000000000..873b1ed73a --- /dev/null +++ b/migrations/mysql/20220531013327_margin_repays.sql @@ -0,0 +1,24 @@ +-- +up +CREATE TABLE `margin_repays` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + + `transaction_id` BIGINT UNSIGNED NOT NULL, + + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `asset` VARCHAR(24) NOT NULL DEFAULT '', + + `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '', + + -- quantity is the quantity of the trade that makes profit + `principle` DECIMAL(16, 8) UNSIGNED NOT NULL, + + `time` DATETIME(3) NOT NULL, + + PRIMARY KEY (`gid`), + UNIQUE KEY (`transaction_id`) +); + +-- +down +DROP TABLE IF EXISTS `margin_repays`; diff --git a/migrations/mysql/20220531013542_margin_interests.sql b/migrations/mysql/20220531013542_margin_interests.sql new file mode 100644 index 0000000000..90169526b3 --- /dev/null +++ b/migrations/mysql/20220531013542_margin_interests.sql @@ -0,0 +1,24 @@ +-- +up +CREATE TABLE `margin_interests` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `asset` VARCHAR(24) NOT NULL DEFAULT '', + + `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '', + + `principle` DECIMAL(16, 8) UNSIGNED NOT 
NULL, + + `interest` DECIMAL(20, 16) UNSIGNED NOT NULL, + + `interest_rate` DECIMAL(20, 16) UNSIGNED NOT NULL, + + `time` DATETIME(3) NOT NULL, + + PRIMARY KEY (`gid`) +); + +-- +down +DROP TABLE IF EXISTS `margin_interests`; diff --git a/migrations/mysql/20220531015005_margin_liquidations.sql b/migrations/mysql/20220531015005_margin_liquidations.sql new file mode 100644 index 0000000000..82ea81f1d6 --- /dev/null +++ b/migrations/mysql/20220531015005_margin_liquidations.sql @@ -0,0 +1,33 @@ +-- +up +CREATE TABLE `margin_liquidations` +( + `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `symbol` VARCHAR(24) NOT NULL DEFAULT '', + + `order_id` BIGINT UNSIGNED NOT NULL, + + `is_isolated` BOOL NOT NULL DEFAULT false, + + `average_price` DECIMAL(16, 8) UNSIGNED NOT NULL, + + `price` DECIMAL(16, 8) UNSIGNED NOT NULL, + + `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, + + `executed_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL, + + `side` VARCHAR(5) NOT NULL DEFAULT '', + + `time_in_force` VARCHAR(5) NOT NULL DEFAULT '', + + `time` DATETIME(3) NOT NULL, + + PRIMARY KEY (`gid`), + UNIQUE KEY (`order_id`, `exchange`) +); + +-- +down +DROP TABLE IF EXISTS `margin_liquidations`; diff --git a/migrations/sqlite3/20200721225616_trades.sql b/migrations/sqlite3/20200721225616_trades.sql new file mode 100644 index 0000000000..fcd5f8f503 --- /dev/null +++ b/migrations/sqlite3/20200721225616_trades.sql @@ -0,0 +1,20 @@ +-- +up +CREATE TABLE `trades` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `id` INTEGER, + `exchange` TEXT NOT NULL DEFAULT '', + `symbol` TEXT NOT NULL, + `price` DECIMAL(16, 8) NOT NULL, + `quantity` DECIMAL(16, 8) NOT NULL, + `quote_quantity` DECIMAL(16, 8) NOT NULL, + `fee` DECIMAL(16, 8) NOT NULL, + `fee_currency` VARCHAR(4) NOT NULL, + `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE, + `is_maker` BOOLEAN NOT NULL DEFAULT FALSE, + `side` VARCHAR(4) NOT NULL DEFAULT '', + `traded_at` DATETIME(3) NOT NULL +); + +-- +down +DROP TABLE IF EXISTS `trades`; diff --git a/migrations/20200819054742_trade_index.sql b/migrations/sqlite3/20200819054742_trade_index.sql similarity index 62% rename from migrations/20200819054742_trade_index.sql rename to migrations/sqlite3/20200819054742_trade_index.sql index 6ce90b1bd9..e6dba452be 100644 --- a/migrations/20200819054742_trade_index.sql +++ b/migrations/sqlite3/20200819054742_trade_index.sql @@ -4,6 +4,6 @@ CREATE INDEX trades_symbol_fee_currency ON trades(symbol, fee_currency, traded_a CREATE INDEX trades_traded_at_symbol ON trades(traded_at, symbol); -- +down -DROP INDEX trades_symbol ON trades; -DROP INDEX trades_symbol_fee_currency ON trades; -DROP INDEX trades_traded_at_symbol ON trades; +DROP INDEX trades_symbol; +DROP INDEX trades_symbol_fee_currency; +DROP INDEX trades_traded_at_symbol; diff --git a/migrations/sqlite3/20201102222546_orders.sql b/migrations/sqlite3/20201102222546_orders.sql new file mode 100644 index 0000000000..561afd06af --- /dev/null +++ b/migrations/sqlite3/20201102222546_orders.sql @@ -0,0 +1,25 @@ +-- +up +CREATE TABLE `orders` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + + `exchange` VARCHAR NOT NULL DEFAULT '', + -- order_id is the order id returned from the exchange + `order_id` INTEGER NOT NULL, + `client_order_id` VARCHAR NOT NULL DEFAULT '', + `order_type` VARCHAR NOT NULL, + `symbol` VARCHAR NOT NULL, + `status` VARCHAR NOT NULL, + `time_in_force` VARCHAR NOT NULL, + `price` DECIMAL(16, 8) NOT NULL, + `stop_price` DECIMAL(16, 8) NOT NULL, + `quantity` DECIMAL(16, 8) NOT 
NULL, + `executed_quantity` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `side` VARCHAR NOT NULL DEFAULT '', + `is_working` BOOLEAN NOT NULL DEFAULT FALSE, + `created_at` DATETIME(3) NOT NULL, + `updated_at` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-- +down +DROP TABLE IF EXISTS `orders`; diff --git a/migrations/sqlite3/20201103173342_trades_add_order_id.sql b/migrations/sqlite3/20201103173342_trades_add_order_id.sql new file mode 100644 index 0000000000..f337eb2312 --- /dev/null +++ b/migrations/sqlite3/20201103173342_trades_add_order_id.sql @@ -0,0 +1,5 @@ +-- +up +ALTER TABLE `trades` ADD COLUMN `order_id` INTEGER; + +-- +down +ALTER TABLE `trades` RENAME COLUMN `order_id` TO `order_id_deleted`; diff --git a/migrations/20201105092857_trades_index_fix.sql b/migrations/sqlite3/20201105092857_trades_index_fix.sql similarity index 63% rename from migrations/20201105092857_trades_index_fix.sql rename to migrations/sqlite3/20201105092857_trades_index_fix.sql index 452f7fa8d7..30a9554063 100644 --- a/migrations/20201105092857_trades_index_fix.sql +++ b/migrations/sqlite3/20201105092857_trades_index_fix.sql @@ -1,16 +1,16 @@ -- +up -DROP INDEX trades_symbol ON trades; -DROP INDEX trades_symbol_fee_currency ON trades; -DROP INDEX trades_traded_at_symbol ON trades; +DROP INDEX IF EXISTS trades_symbol; +DROP INDEX IF EXISTS trades_symbol_fee_currency; +DROP INDEX IF EXISTS trades_traded_at_symbol; CREATE INDEX trades_symbol ON trades (exchange, symbol); CREATE INDEX trades_symbol_fee_currency ON trades (exchange, symbol, fee_currency, traded_at); CREATE INDEX trades_traded_at_symbol ON trades (exchange, traded_at, symbol); -- +down -DROP INDEX trades_symbol ON trades; -DROP INDEX trades_symbol_fee_currency ON trades; -DROP INDEX trades_traded_at_symbol ON trades; +DROP INDEX IF EXISTS trades_symbol; +DROP INDEX IF EXISTS trades_symbol_fee_currency; +DROP INDEX IF EXISTS trades_traded_at_symbol; CREATE INDEX trades_symbol ON trades (symbol); CREATE INDEX trades_symbol_fee_currency ON trades (symbol, fee_currency, traded_at); diff --git a/migrations/20201105093056_orders_add_index.sql b/migrations/sqlite3/20201105093056_orders_add_index.sql similarity index 65% rename from migrations/20201105093056_orders_add_index.sql rename to migrations/sqlite3/20201105093056_orders_add_index.sql index 4d685d4a9f..99834a5512 100644 --- a/migrations/20201105093056_orders_add_index.sql +++ b/migrations/sqlite3/20201105093056_orders_add_index.sql @@ -3,5 +3,5 @@ CREATE INDEX orders_symbol ON orders (exchange, symbol); CREATE UNIQUE INDEX orders_order_id ON orders (order_id, exchange); -- +down -DROP INDEX orders_symbol ON orders; -DROP INDEX orders_order_id ON orders; +DROP INDEX IF EXISTS orders_symbol; +DROP INDEX IF EXISTS orders_order_id; diff --git a/migrations/sqlite3/20201106114742_klines.sql b/migrations/sqlite3/20201106114742_klines.sql new file mode 100644 index 0000000000..f8ea6bacbf --- /dev/null +++ b/migrations/sqlite3/20201106114742_klines.sql @@ -0,0 +1,97 @@ +-- +up +-- +begin +CREATE TABLE `klines` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `exchange` VARCHAR(10) NOT NULL, + `start_time` DATETIME(3) NOT NULL, + `end_time` DATETIME(3) NOT NULL, + `interval` VARCHAR(3) NOT NULL, + `symbol` VARCHAR(7) NOT NULL, + `open` DECIMAL(16, 8) NOT NULL, + `high` DECIMAL(16, 8) NOT NULL, + `low` DECIMAL(16, 8) NOT NULL, + `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `closed` BOOLEAN NOT NULL DEFAULT TRUE, + `last_trade_id` INT NOT NULL DEFAULT 0, + 
`num_trades` INT NOT NULL DEFAULT 0 +); +-- +end + + +-- +begin +CREATE TABLE `okex_klines` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `exchange` VARCHAR(10) NOT NULL, + `start_time` DATETIME(3) NOT NULL, + `end_time` DATETIME(3) NOT NULL, + `interval` VARCHAR(3) NOT NULL, + `symbol` VARCHAR(7) NOT NULL, + `open` DECIMAL(16, 8) NOT NULL, + `high` DECIMAL(16, 8) NOT NULL, + `low` DECIMAL(16, 8) NOT NULL, + `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `closed` BOOLEAN NOT NULL DEFAULT TRUE, + `last_trade_id` INT NOT NULL DEFAULT 0, + `num_trades` INT NOT NULL DEFAULT 0 +); +-- +end + +-- +begin +CREATE TABLE `binance_klines` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `exchange` VARCHAR(10) NOT NULL, + `start_time` DATETIME(3) NOT NULL, + `end_time` DATETIME(3) NOT NULL, + `interval` VARCHAR(3) NOT NULL, + `symbol` VARCHAR(7) NOT NULL, + `open` DECIMAL(16, 8) NOT NULL, + `high` DECIMAL(16, 8) NOT NULL, + `low` DECIMAL(16, 8) NOT NULL, + `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `closed` BOOLEAN NOT NULL DEFAULT TRUE, + `last_trade_id` INT NOT NULL DEFAULT 0, + `num_trades` INT NOT NULL DEFAULT 0 +); +-- +end + +-- +begin +CREATE TABLE `max_klines` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `exchange` VARCHAR(10) NOT NULL, + `start_time` DATETIME(3) NOT NULL, + `end_time` DATETIME(3) NOT NULL, + `interval` VARCHAR(3) NOT NULL, + `symbol` VARCHAR(7) NOT NULL, + `open` DECIMAL(16, 8) NOT NULL, + `high` DECIMAL(16, 8) NOT NULL, + `low` DECIMAL(16, 8) NOT NULL, + `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `closed` BOOLEAN NOT NULL DEFAULT TRUE, + `last_trade_id` INT NOT NULL DEFAULT 0, + `num_trades` INT NOT NULL DEFAULT 0 +); +-- +end + +-- +begin +CREATE INDEX `klines_end_time_symbol_interval` ON `klines` (`end_time`, `symbol`, `interval`); +CREATE INDEX `binance_klines_end_time_symbol_interval` ON `binance_klines` (`end_time`, `symbol`, `interval`); +CREATE INDEX `okex_klines_end_time_symbol_interval` ON `okex_klines` (`end_time`, `symbol`, `interval`); +CREATE INDEX `max_klines_end_time_symbol_interval` ON `max_klines` (`end_time`, `symbol`, `interval`); +-- +end + + +-- +down +DROP INDEX IF EXISTS `klines_end_time_symbol_interval`; +DROP TABLE IF EXISTS `binance_klines`; +DROP TABLE IF EXISTS `okex_klines`; +DROP TABLE IF EXISTS `max_klines`; +DROP TABLE IF EXISTS `klines`; + diff --git a/migrations/sqlite3/20201211175751_fix_symbol_length.sql b/migrations/sqlite3/20201211175751_fix_symbol_length.sql new file mode 100644 index 0000000000..06569c6673 --- /dev/null +++ b/migrations/sqlite3/20201211175751_fix_symbol_length.sql @@ -0,0 +1,5 @@ +-- +up +SELECT 1; + +-- +down +SELECT 1; diff --git a/migrations/sqlite3/20210118163847_fix_unique_index.sql b/migrations/sqlite3/20210118163847_fix_unique_index.sql new file mode 100644 index 0000000000..60ada793e4 --- /dev/null +++ b/migrations/sqlite3/20210118163847_fix_unique_index.sql @@ -0,0 +1,9 @@ +-- +up +-- +begin +CREATE UNIQUE INDEX `trade_unique_id` ON `trades` (`exchange`,`symbol`, `side`, `id`); +-- +end + +-- +down +-- +begin +DROP INDEX IF EXISTS `trade_unique_id`; +-- +end diff --git a/migrations/sqlite3/20210119232826_add_margin_columns.sql b/migrations/sqlite3/20210119232826_add_margin_columns.sql new file mode 100644 index 0000000000..eaff7b6130 --- /dev/null +++ b/migrations/sqlite3/20210119232826_add_margin_columns.sql @@ -0,0 +1,33 @@ +-- +up +-- +begin +ALTER TABLE 
`trades` ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE; +-- +end +-- +begin +ALTER TABLE `trades` ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE; +-- +end + +-- +begin +ALTER TABLE `orders` ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE; +-- +end + +-- +begin +ALTER TABLE `orders` ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE; +-- +end + +-- +down + +-- +begin +ALTER TABLE `trades` RENAME COLUMN `is_margin` TO `is_margin_deleted`; +-- +end + +-- +begin +ALTER TABLE `trades` RENAME COLUMN `is_isolated` TO `is_isolated_deleted`; +-- +end + +-- +begin +ALTER TABLE `orders` RENAME COLUMN `is_margin` TO `is_margin_deleted`; +-- +end + +-- +begin +ALTER TABLE `orders` RENAME COLUMN `is_isolated` TO `is_isolated_deleted`; +-- +end diff --git a/migrations/sqlite3/20210129182704_trade_price_quantity_index.sql b/migrations/sqlite3/20210129182704_trade_price_quantity_index.sql new file mode 100644 index 0000000000..196f7467d9 --- /dev/null +++ b/migrations/sqlite3/20210129182704_trade_price_quantity_index.sql @@ -0,0 +1,10 @@ +-- +up +-- +begin +CREATE INDEX trades_price_quantity ON trades (order_id,price,quantity); +-- +end + +-- +down + +-- +begin +DROP INDEX trades_price_quantity; +-- +end diff --git a/migrations/sqlite3/20210215203111_add_pnl_column.sql b/migrations/sqlite3/20210215203111_add_pnl_column.sql new file mode 100644 index 0000000000..9e6e2e2fa1 --- /dev/null +++ b/migrations/sqlite3/20210215203111_add_pnl_column.sql @@ -0,0 +1,18 @@ +-- +up +-- +begin +ALTER TABLE `trades` ADD COLUMN `pnl` DECIMAL NULL; +-- +end + +-- +begin +ALTER TABLE `trades` ADD COLUMN `strategy` TEXT; +-- +end + +-- +down + +-- +begin +ALTER TABLE `trades` RENAME COLUMN `pnl` TO `pnl_deleted`; +-- +end + +-- +begin +ALTER TABLE `trades` RENAME COLUMN `strategy` TO `strategy_deleted`; +-- +end diff --git a/migrations/sqlite3/20210223080622_add_rewards_table.sql b/migrations/sqlite3/20210223080622_add_rewards_table.sql new file mode 100644 index 0000000000..ff6b3ae581 --- /dev/null +++ b/migrations/sqlite3/20210223080622_add_rewards_table.sql @@ -0,0 +1,29 @@ +-- +up +CREATE TABLE `rewards` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + + -- for exchange + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + -- reward record id + `uuid` VARCHAR(32) NOT NULL, + `reward_type` VARCHAR(24) NOT NULL DEFAULT '', + + -- currency symbol, BTC, MAX, USDT ... 
etc + `currency` VARCHAR(5) NOT NULL, + + -- the quantity of the rewards + `quantity` DECIMAL(16, 8) NOT NULL, + + `state` VARCHAR(5) NOT NULL, + + `created_at` DATETIME NOT NULL, + + `spent` BOOLEAN NOT NULL DEFAULT FALSE, + + `note` TEXT NULL +); + +-- +down +DROP TABLE IF EXISTS `rewards`; diff --git a/migrations/sqlite3/20210301140656_add_withdraws_table.sql b/migrations/sqlite3/20210301140656_add_withdraws_table.sql new file mode 100644 index 0000000000..d1b2cb282f --- /dev/null +++ b/migrations/sqlite3/20210301140656_add_withdraws_table.sql @@ -0,0 +1,36 @@ +-- +up +-- +begin +CREATE TABLE `withdraws` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + -- asset is the asset name (currency) + `asset` VARCHAR(10) NOT NULL, + + `address` VARCHAR(128) NOT NULL, + `network` VARCHAR(32) NOT NULL DEFAULT '', + `amount` DECIMAL(16, 8) NOT NULL, + + `txn_id` VARCHAR(256) NOT NULL, + `txn_fee` DECIMAL(16, 8) NOT NULL DEFAULT 0, + `txn_fee_currency` VARCHAR(32) NOT NULL DEFAULT '', + `time` DATETIME(3) NOT NULL +); +-- +end + +-- +begin +CREATE UNIQUE INDEX `withdraws_txn_id` ON `withdraws` (`exchange`, `txn_id`); +-- +end + + +-- +down + +-- +begin +DROP INDEX IF EXISTS `withdraws_txn_id`; +-- +end + +-- +begin +DROP TABLE IF EXISTS `withdraws`; +-- +end + diff --git a/migrations/sqlite3/20210307201830_add_deposits_table.sql b/migrations/sqlite3/20210307201830_add_deposits_table.sql new file mode 100644 index 0000000000..2c8e0baf8a --- /dev/null +++ b/migrations/sqlite3/20210307201830_add_deposits_table.sql @@ -0,0 +1,31 @@ +-- +up +-- +begin +CREATE TABLE `deposits` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `exchange` VARCHAR(24) NOT NULL, + + -- asset is the asset name (currency) + `asset` VARCHAR(10) NOT NULL, + + `address` VARCHAR(128) NOT NULL DEFAULT '', + `amount` DECIMAL(16, 8) NOT NULL, + `txn_id` VARCHAR(256) NOT NULL, + `time` DATETIME(3) NOT NULL +); +-- +end +-- +begin +CREATE UNIQUE INDEX `deposits_txn_id` ON `deposits` (`exchange`, `txn_id`); +-- +end + + +-- +down + +-- +begin +DROP INDEX IF EXISTS `deposits_txn_id`; +-- +end + +-- +begin +DROP TABLE IF EXISTS `deposits`; +-- +end + diff --git a/migrations/sqlite3/20210531234123_add_kline_taker_buy_columns.sql b/migrations/sqlite3/20210531234123_add_kline_taker_buy_columns.sql new file mode 100644 index 0000000000..e86a3da4b7 --- /dev/null +++ b/migrations/sqlite3/20210531234123_add_kline_taker_buy_columns.sql @@ -0,0 +1,35 @@ +-- +up +-- +begin +ALTER TABLE `binance_klines` + ADD COLUMN `quote_volume` DECIMAL NOT NULL DEFAULT 0.0; +ALTER TABLE `binance_klines` + ADD COLUMN `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0; +ALTER TABLE `binance_klines` + ADD COLUMN `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0; +-- +end +-- +begin +ALTER TABLE `max_klines` + ADD COLUMN `quote_volume` DECIMAL NOT NULL DEFAULT 0.0; +ALTER TABLE `max_klines` + ADD COLUMN `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0; +ALTER TABLE `max_klines` + ADD COLUMN `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0; +-- +end +-- +begin +ALTER TABLE `okex_klines` + ADD COLUMN `quote_volume` DECIMAL NOT NULL DEFAULT 0.0; +ALTER TABLE `okex_klines` + ADD COLUMN `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0; +ALTER TABLE `okex_klines` + ADD COLUMN `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0; +-- +end +-- +begin +ALTER TABLE `klines` + ADD COLUMN `quote_volume` DECIMAL NOT NULL DEFAULT 0.0; +ALTER TABLE `klines` + ADD COLUMN `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 
0.0; +ALTER TABLE `klines` + ADD COLUMN `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0; +-- +end + +-- +down diff --git a/migrations/sqlite3/20211205162302_add_is_futures_column.sql b/migrations/sqlite3/20211205162302_add_is_futures_column.sql new file mode 100644 index 0000000000..ca020183e2 --- /dev/null +++ b/migrations/sqlite3/20211205162302_add_is_futures_column.sql @@ -0,0 +1,18 @@ +-- +up +-- +begin +ALTER TABLE `trades` ADD COLUMN `is_futures` BOOLEAN NOT NULL DEFAULT FALSE; +-- +end + +-- +begin +ALTER TABLE `orders` ADD COLUMN `is_futures` BOOLEAN NOT NULL DEFAULT FALSE; +-- +end + +-- +down + +-- +begin +ALTER TABLE `trades` RENAME COLUMN `is_futures` TO `is_futures_deleted`; +-- +end + +-- +begin +ALTER TABLE `orders` RENAME COLUMN `is_futures` TO `is_futures_deleted`; +-- +end diff --git a/migrations/sqlite3/20211211020303_add_ftx_kline.sql b/migrations/sqlite3/20211211020303_add_ftx_kline.sql new file mode 100644 index 0000000000..0b34d02e8d --- /dev/null +++ b/migrations/sqlite3/20211211020303_add_ftx_kline.sql @@ -0,0 +1,30 @@ +-- +up +-- +begin + +CREATE TABLE `ftx_klines` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `exchange` VARCHAR(10) NOT NULL, + `start_time` DATETIME(3) NOT NULL, + `end_time` DATETIME(3) NOT NULL, + `interval` VARCHAR(3) NOT NULL, + `symbol` VARCHAR(7) NOT NULL, + `open` DECIMAL(16, 8) NOT NULL, + `high` DECIMAL(16, 8) NOT NULL, + `low` DECIMAL(16, 8) NOT NULL, + `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `closed` BOOLEAN NOT NULL DEFAULT TRUE, + `last_trade_id` INT NOT NULL DEFAULT 0, + `num_trades` INT NOT NULL DEFAULT 0, + `quote_volume` DECIMAL NOT NULL DEFAULT 0.0, + `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0, + `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0 +); +-- +end + +-- +down + +-- +begin +DROP TABLE ftx_klines; +-- +end diff --git a/migrations/sqlite3/20211211034818_add_nav_history_details.sql b/migrations/sqlite3/20211211034818_add_nav_history_details.sql new file mode 100644 index 0000000000..56860040ca --- /dev/null +++ b/migrations/sqlite3/20211211034818_add_nav_history_details.sql @@ -0,0 +1,26 @@ +-- +up +-- +begin +CREATE TABLE `nav_history_details` +( + `gid` BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY, + `exchange` VARCHAR(30) NOT NULL DEFAULT '', + `subaccount` VARCHAR(30) NOT NULL DEFAULT '', + `time` DATETIME(3) NOT NULL DEFAULT (strftime('%s', 'now')), + `currency` VARCHAR(30) NOT NULL, + `net_asset_in_usd` DECIMAL DEFAULT 0.00000000 NOT NULL, + `net_asset_in_btc` DECIMAL DEFAULT 0.00000000 NOT NULL, + `balance` DECIMAL DEFAULT 0.00000000 NOT NULL, + `available` DECIMAL DEFAULT 0.00000000 NOT NULL, + `locked` DECIMAL DEFAULT 0.00000000 NOT NULL +); +-- +end +-- +begin +CREATE INDEX idx_nav_history_details + on nav_history_details (time, currency, exchange); +-- +end + +-- +down + +-- +begin +DROP TABLE nav_history_details; +-- +end diff --git a/migrations/sqlite3/20211211103657_update_fee_currency_length.sql b/migrations/sqlite3/20211211103657_update_fee_currency_length.sql new file mode 100644 index 0000000000..9f146225ee --- /dev/null +++ b/migrations/sqlite3/20211211103657_update_fee_currency_length.sql @@ -0,0 +1,10 @@ +-- +up +-- +begin +SELECT 1; +-- +end + +-- +down + +-- +begin +SELECT 1; +-- +end diff --git a/migrations/sqlite3/20211226022411_add_kucoin_klines.sql b/migrations/sqlite3/20211226022411_add_kucoin_klines.sql new file mode 100644 index 0000000000..c53a00193c --- /dev/null +++ 
b/migrations/sqlite3/20211226022411_add_kucoin_klines.sql @@ -0,0 +1,29 @@ +-- +up +-- +begin +CREATE TABLE `kucoin_klines` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + `exchange` VARCHAR(10) NOT NULL, + `start_time` DATETIME(3) NOT NULL, + `end_time` DATETIME(3) NOT NULL, + `interval` VARCHAR(3) NOT NULL, + `symbol` VARCHAR(7) NOT NULL, + `open` DECIMAL(16, 8) NOT NULL, + `high` DECIMAL(16, 8) NOT NULL, + `low` DECIMAL(16, 8) NOT NULL, + `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0, + `closed` BOOLEAN NOT NULL DEFAULT TRUE, + `last_trade_id` INT NOT NULL DEFAULT 0, + `num_trades` INT NOT NULL DEFAULT 0, + `quote_volume` DECIMAL NOT NULL DEFAULT 0.0, + `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0, + `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0 +); +-- +end + +-- +down + +-- +begin +DROP TABLE kucoin_klines; +-- +end diff --git a/migrations/sqlite3/20220304153309_add_profit_table.sql b/migrations/sqlite3/20220304153309_add_profit_table.sql new file mode 100644 index 0000000000..290d325399 --- /dev/null +++ b/migrations/sqlite3/20220304153309_add_profit_table.sql @@ -0,0 +1,68 @@ +-- +up +CREATE TABLE `profits` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + + `strategy` VARCHAR(32) NOT NULL, + `strategy_instance_id` VARCHAR(64) NOT NULL, + + `symbol` VARCHAR(8) NOT NULL, + + -- average_cost is the position average cost + `average_cost` DECIMAL(16, 8) NOT NULL, + + -- profit is the pnl (profit and loss) + `profit` DECIMAL(16, 8) NOT NULL, + + -- net_profit is the pnl (profit and loss) + `net_profit` DECIMAL(16, 8) NOT NULL, + + -- profit_margin is the pnl (profit and loss) + `profit_margin` DECIMAL(16, 8) NOT NULL, + + -- net_profit_margin is the pnl (profit and loss) + `net_profit_margin` DECIMAL(16, 8) NOT NULL, + + `quote_currency` VARCHAR(10) NOT NULL, + + `base_currency` VARCHAR(10) NOT NULL, + + -- ------------------------------------------------------- + -- embedded trade data -- + -- ------------------------------------------------------- + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `is_futures` BOOLEAN NOT NULL DEFAULT FALSE, + + `is_margin` BOOLEAN NOT NULL DEFAULT FALSE, + + `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE, + + `trade_id` BIGINT NOT NULL, + + -- side is the side of the trade that makes profit + `side` VARCHAR(4) NOT NULL DEFAULT '', + + `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE, + + `is_maker` BOOLEAN NOT NULL DEFAULT FALSE, + + -- price is the price of the trade that makes profit + `price` DECIMAL(16, 8) NOT NULL, + + -- quantity is the quantity of the trade that makes profit + `quantity` DECIMAL(16, 8) NOT NULL, + + -- trade_amount is the quote quantity of the trade that makes profit + `quote_quantity` DECIMAL(16, 8) NOT NULL, + + `traded_at` DATETIME(3) NOT NULL, + + -- fee + `fee_in_usd` DECIMAL(16, 8), + `fee` DECIMAL(16, 8) NOT NULL, + `fee_currency` VARCHAR(10) NOT NULL +); + +-- +down +DROP TABLE IF EXISTS `profits`; diff --git a/migrations/sqlite3/20220307132917_add_positions.sql b/migrations/sqlite3/20220307132917_add_positions.sql new file mode 100644 index 0000000000..23ea853e54 --- /dev/null +++ b/migrations/sqlite3/20220307132917_add_positions.sql @@ -0,0 +1,27 @@ +-- +up +CREATE TABLE `positions` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + + `strategy` VARCHAR(32) NOT NULL, + `strategy_instance_id` VARCHAR(64) NOT NULL, + + `symbol` VARCHAR(20) NOT NULL, + `quote_currency` VARCHAR(10) NOT NULL, + `base_currency` VARCHAR(10) NOT NULL, + + -- average_cost is the position average 
cost + `average_cost` DECIMAL(16, 8) NOT NULL, + `base` DECIMAL(16, 8) NOT NULL, + `quote` DECIMAL(16, 8) NOT NULL, + `profit` DECIMAL(16, 8) NULL, + + -- trade related columns + `trade_id` BIGINT NOT NULL, + `side` VARCHAR(4) NOT NULL, -- side of the trade + `exchange` VARCHAR(12) NOT NULL, -- exchange of the trade + `traded_at` DATETIME(3) NOT NULL +); + +-- +down +DROP TABLE IF EXISTS `positions`; diff --git a/migrations/sqlite3/20220317125555_fix_trade_indexes.sql b/migrations/sqlite3/20220317125555_fix_trade_indexes.sql new file mode 100644 index 0000000000..57be7e6356 --- /dev/null +++ b/migrations/sqlite3/20220317125555_fix_trade_indexes.sql @@ -0,0 +1,19 @@ +-- +up +DROP INDEX IF EXISTS trades_symbol; +DROP INDEX IF EXISTS trades_symbol_fee_currency; +DROP INDEX IF EXISTS trades_traded_at_symbol; + +-- this index is used for general trade query +CREATE INDEX trades_traded_at ON trades (traded_at, symbol, exchange, id, fee_currency, fee); +-- this index is used for join clause by trade_id +CREATE INDEX trades_id_traded_at ON trades (id, traded_at); +-- this index is used for join clause by order id +CREATE INDEX trades_order_id_traded_at ON trades (order_id, traded_at); + +-- +down +DROP INDEX IF EXISTS trades_traded_at; +DROP INDEX IF EXISTS trades_id_traded_at; +DROP INDEX IF EXISTS trades_order_id_traded_at; +CREATE INDEX trades_symbol ON trades (exchange, symbol); +CREATE INDEX trades_symbol_fee_currency ON trades (exchange, symbol, fee_currency, traded_at); +CREATE INDEX trades_traded_at_symbol ON trades (exchange, traded_at, symbol); diff --git a/migrations/sqlite3/20220419121046_fix_fee_column.sql b/migrations/sqlite3/20220419121046_fix_fee_column.sql new file mode 100644 index 0000000000..9f146225ee --- /dev/null +++ b/migrations/sqlite3/20220419121046_fix_fee_column.sql @@ -0,0 +1,10 @@ +-- +up +-- +begin +SELECT 1; +-- +end + +-- +down + +-- +begin +SELECT 1; +-- +end diff --git a/migrations/sqlite3/20220503144849_add_margin_info_to_nav.sql b/migrations/sqlite3/20220503144849_add_margin_info_to_nav.sql new file mode 100644 index 0000000000..7f2fc06e0e --- /dev/null +++ b/migrations/sqlite3/20220503144849_add_margin_info_to_nav.sql @@ -0,0 +1,12 @@ +-- +up +ALTER TABLE `nav_history_details` ADD COLUMN `session` VARCHAR(50) NOT NULL; +ALTER TABLE `nav_history_details` ADD COLUMN `borrowed` DECIMAL DEFAULT 0.00000000 NOT NULL; +ALTER TABLE `nav_history_details` ADD COLUMN `net_asset` DECIMAL DEFAULT 0.00000000 NOT NULL; +ALTER TABLE `nav_history_details` ADD COLUMN `price_in_usd` DECIMAL DEFAULT 0.00000000 NOT NULL; +ALTER TABLE `nav_history_details` ADD COLUMN `is_margin` BOOL DEFAULT FALSE NOT NULL; +ALTER TABLE `nav_history_details` ADD COLUMN `is_isolated` BOOL DEFAULT FALSE NOT NULL; +ALTER TABLE `nav_history_details` ADD COLUMN `isolated_symbol` VARCHAR(30) DEFAULT '' NOT NULL; + +-- +down +-- we can not rollback alter table change in sqlite +SELECT 1; diff --git a/migrations/sqlite3/20220504184155_fix_net_asset_column.sql b/migrations/sqlite3/20220504184155_fix_net_asset_column.sql new file mode 100644 index 0000000000..96993735ee --- /dev/null +++ b/migrations/sqlite3/20220504184155_fix_net_asset_column.sql @@ -0,0 +1,11 @@ +-- +up +-- +begin +ALTER TABLE `nav_history_details` ADD COLUMN `interest` DECIMAL DEFAULT 0.00000000 NOT NULL; +-- +end + + +-- +down + +-- +begin +SELECT 1; +-- +end diff --git a/migrations/sqlite3/20220512170330_fix_profit_symbol_length.sql b/migrations/sqlite3/20220512170330_fix_profit_symbol_length.sql new file mode 100644 index 
0000000000..583c4051e3 --- /dev/null +++ b/migrations/sqlite3/20220512170330_fix_profit_symbol_length.sql @@ -0,0 +1,12 @@ +-- +up +-- +begin +-- We cannot change a column type in SQLite +-- However, SQLite does not enforce the length of a VARCHAR, i.e. VARCHAR(8) == VARCHAR(20) == TEXT +SELECT 1; +-- +end + +-- +down + +-- +begin +SELECT 1; +-- +end
diff --git a/migrations/sqlite3/20220520140707_kline_unique_idx.sql b/migrations/sqlite3/20220520140707_kline_unique_idx.sql new file mode 100644 index 0000000000..e45bde5f4b --- /dev/null +++ b/migrations/sqlite3/20220520140707_kline_unique_idx.sql @@ -0,0 +1,47 @@ +-- +up +-- +begin +CREATE UNIQUE INDEX idx_kline_binance_unique + ON binance_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +begin +CREATE UNIQUE INDEX idx_kline_max_unique + ON max_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +begin +CREATE UNIQUE INDEX `idx_kline_ftx_unique` + ON ftx_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +begin +CREATE UNIQUE INDEX `idx_kline_kucoin_unique` + ON kucoin_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +begin +CREATE UNIQUE INDEX `idx_kline_okex_unique` + ON okex_klines (`symbol`, `interval`, `start_time`); +-- +end + +-- +down + +-- +begin +DROP INDEX `idx_kline_ftx_unique` ON `ftx_klines`; +-- +end + +-- +begin +DROP INDEX `idx_kline_max_unique` ON `max_klines`; +-- +end + +-- +begin +DROP INDEX `idx_kline_binance_unique` ON `binance_klines`; +-- +end + +-- +begin +DROP INDEX `idx_kline_kucoin_unique` ON `kucoin_klines`; +-- +end + +-- +begin +DROP INDEX `idx_kline_okex_unique` ON `okex_klines`; +-- +end
diff --git a/migrations/sqlite3/20220531012226_margin_loans.sql b/migrations/sqlite3/20220531012226_margin_loans.sql new file mode 100644 index 0000000000..2569e671a9 --- /dev/null +++ b/migrations/sqlite3/20220531012226_margin_loans.sql @@ -0,0 +1,21 @@ +-- +up +CREATE TABLE `margin_loans` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + + `transaction_id` INTEGER NOT NULL, + + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `asset` VARCHAR(24) NOT NULL DEFAULT '', + + `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '', + + -- principle is the principal amount of the loan + `principle` DECIMAL(16, 8) NOT NULL, + + `time` DATETIME(3) NOT NULL +); + +-- +down +DROP TABLE IF EXISTS `margin_loans`;
diff --git a/migrations/sqlite3/20220531013327_margin_repays.sql b/migrations/sqlite3/20220531013327_margin_repays.sql new file mode 100644 index 0000000000..c9f6123650 --- /dev/null +++ b/migrations/sqlite3/20220531013327_margin_repays.sql @@ -0,0 +1,21 @@ +-- +up +CREATE TABLE `margin_repays` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + + `transaction_id` INTEGER NOT NULL, + + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `asset` VARCHAR(24) NOT NULL DEFAULT '', + + `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '', + + -- principle is the principal amount repaid + `principle` DECIMAL(16, 8) NOT NULL, + + `time` DATETIME(3) NOT NULL +); + +-- +down +DROP TABLE IF EXISTS `margin_repays`;
diff --git a/migrations/sqlite3/20220531013541_margin_interests.sql b/migrations/sqlite3/20220531013541_margin_interests.sql new file mode 100644 index 0000000000..f088f25814 --- /dev/null +++ b/migrations/sqlite3/20220531013541_margin_interests.sql @@ -0,0 +1,22 @@ +-- +up +CREATE TABLE `margin_interests` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `asset` VARCHAR(24) NOT NULL DEFAULT '', + + `isolated_symbol` VARCHAR(24) NOT NULL
DEFAULT '', + + `principle` DECIMAL(16, 8) NOT NULL, + + `interest` DECIMAL(20, 16) NOT NULL, + + `interest_rate` DECIMAL(20, 16) NOT NULL, + + `time` DATETIME(3) NOT NULL +); + +-- +down +DROP TABLE IF EXISTS `margin_interests`; diff --git a/migrations/sqlite3/20220531015005_margin_liquidations.sql b/migrations/sqlite3/20220531015005_margin_liquidations.sql new file mode 100644 index 0000000000..5a99afc362 --- /dev/null +++ b/migrations/sqlite3/20220531015005_margin_liquidations.sql @@ -0,0 +1,30 @@ +-- +up +CREATE TABLE `margin_liquidations` +( + `gid` INTEGER PRIMARY KEY AUTOINCREMENT, + + `exchange` VARCHAR(24) NOT NULL DEFAULT '', + + `symbol` VARCHAR(24) NOT NULL DEFAULT '', + + `order_id` INTEGER NOT NULL, + + `is_isolated` BOOL NOT NULL DEFAULT false, + + `average_price` DECIMAL(16, 8) NOT NULL, + + `price` DECIMAL(16, 8) NOT NULL, + + `quantity` DECIMAL(16, 8) NOT NULL, + + `executed_quantity` DECIMAL(16, 8) NOT NULL, + + `side` VARCHAR(5) NOT NULL DEFAULT '', + + `time_in_force` VARCHAR(5) NOT NULL DEFAULT '', + + `time` DATETIME(3) NOT NULL +); + +-- +down +DROP TABLE IF EXISTS `margin_liquidations`; diff --git a/pkg/accounting/cost_distribution.go b/pkg/accounting/cost_distribution.go index 83eefe0c69..5c588b1821 100644 --- a/pkg/accounting/cost_distribution.go +++ b/pkg/accounting/cost_distribution.go @@ -1,54 +1,46 @@ package accounting import ( - "fmt" "math" "sort" "strconv" "strings" "sync" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) -func zero(a float64) bool { - return int(math.Round(a*1e8)) == 0 -} - -func round(a float64) float64 { - return math.Round(a*1e8) / 1e8 -} - type Stock types.Trade func (stock *Stock) String() string { - return fmt.Sprintf("%f (%f)", stock.Price, stock.Quantity) + return stock.Price.String() + " (" + stock.Quantity.String() + ")" } -func (stock *Stock) Consume(quantity float64) float64 { - q := math.Min(stock.Quantity, quantity) - stock.Quantity = round(stock.Quantity - q) +func (stock *Stock) Consume(quantity fixedpoint.Value) fixedpoint.Value { + q := fixedpoint.Min(stock.Quantity, quantity) + stock.Quantity = stock.Quantity.Sub(q) return q } type StockSlice []Stock -func (slice StockSlice) QuantityBelowPrice(price float64) (quantity float64) { +func (slice StockSlice) QuantityBelowPrice(price fixedpoint.Value) (quantity fixedpoint.Value) { for _, stock := range slice { - if stock.Price < price { - quantity += stock.Quantity + if stock.Price.Compare(price) < 0 { + quantity = quantity.Add(stock.Quantity) } } - return round(quantity) + return quantity } -func (slice StockSlice) Quantity() (total float64) { +func (slice StockSlice) Quantity() (total fixedpoint.Value) { for _, stock := range slice { - total += stock.Quantity + total = total.Add(stock.Quantity) } - return round(total) + return total } type StockDistribution struct { @@ -61,28 +53,26 @@ type StockDistribution struct { } type DistributionStats struct { - PriceLevels []string `json:"priceLevels"` - TotalQuantity float64 `json:"totalQuantity"` - Quantities map[string]float64 `json:"quantities"` - Stocks map[string]StockSlice `json:"stocks"` + PriceLevels []string `json:"priceLevels"` + TotalQuantity fixedpoint.Value `json:"totalQuantity"` + Quantities map[string]fixedpoint.Value `json:"quantities"` + Stocks map[string]StockSlice `json:"stocks"` } func (m *StockDistribution) DistributionStats(level int) *DistributionStats { var d = DistributionStats{ - Quantities: map[string]float64{}, + Quantities: map[string]fixedpoint.Value{}, Stocks: 
map[string]StockSlice{}, } for _, stock := range m.Stocks { - n := math.Ceil(math.Log10(stock.Price)) + n := math.Ceil(math.Log10(stock.Price.Float64())) digits := int(n - math.Max(float64(level), 1.0)) - div := math.Pow10(digits) - priceLevel := math.Floor(stock.Price/div) * div - key := strconv.FormatFloat(priceLevel, 'f', 2, 64) + key := stock.Price.Round(-digits, fixedpoint.Down).FormatString(2) - d.TotalQuantity += stock.Quantity + d.TotalQuantity = d.TotalQuantity.Add(stock.Quantity) d.Stocks[key] = append(d.Stocks[key], stock) - d.Quantities[key] += stock.Quantity + d.Quantities[key] = d.Quantities[key].Add(stock.Quantity) } var priceLevels []float64 @@ -96,8 +86,6 @@ func (m *StockDistribution) DistributionStats(level int) *DistributionStats { d.PriceLevels = append(d.PriceLevels, strconv.FormatFloat(price, 'f', 2, 64)) } - sort.Float64s(priceLevels) - return &d } @@ -114,7 +102,7 @@ func (m *StockDistribution) squash() { var squashed StockSlice for _, stock := range m.Stocks { - if !zero(stock.Quantity) { + if !stock.Quantity.IsZero() { squashed = append(squashed, stock) } } @@ -152,11 +140,11 @@ func (m *StockDistribution) consume(sell Stock) error { stock := m.Stocks[idx] // find any stock price is lower than the sell trade - if stock.Price >= sell.Price { + if stock.Price.Compare(sell.Price) >= 0 { continue } - if zero(stock.Quantity) { + if stock.Quantity.IsZero() { continue } @@ -164,7 +152,7 @@ func (m *StockDistribution) consume(sell Stock) error { sell.Consume(delta) m.Stocks[idx] = stock - if zero(sell.Quantity) { + if sell.Quantity.IsZero() { return nil } } @@ -173,7 +161,7 @@ func (m *StockDistribution) consume(sell Stock) error { for ; idx >= 0; idx-- { stock := m.Stocks[idx] - if zero(stock.Quantity) { + if stock.Quantity.IsZero() { continue } @@ -181,12 +169,12 @@ func (m *StockDistribution) consume(sell Stock) error { sell.Consume(delta) m.Stocks[idx] = stock - if zero(sell.Quantity) { + if sell.Quantity.IsZero() { return nil } } - if sell.Quantity > 0.0 { + if sell.Quantity.Sign() > 0 { m.PendingSells = append(m.PendingSells, sell) } @@ -203,7 +191,7 @@ func (m *StockDistribution) AddTrades(trades []types.Trade) (checkpoints []int, trade.Symbol = m.Symbol trade.IsBuyer = false trade.Quantity = trade.Fee - trade.Fee = 0.0 + trade.Fee = fixedpoint.Zero } } @@ -238,11 +226,11 @@ func (m *StockDistribution) AddTrades(trades []types.Trade) (checkpoints []int, func toStock(trade types.Trade) Stock { if strings.HasPrefix(trade.Symbol, trade.FeeCurrency) { if trade.IsBuyer { - trade.Quantity -= trade.Fee + trade.Quantity = trade.Quantity.Sub(trade.Fee) } else { - trade.Quantity += trade.Fee + trade.Quantity = trade.Quantity.Add(trade.Fee) } - trade.Fee = 0.0 + trade.Fee = fixedpoint.Zero } return Stock(trade) } diff --git a/pkg/accounting/cost_distribution_test.go b/pkg/accounting/cost_distribution_test.go index 2a899a6a41..3addf28afa 100644 --- a/pkg/accounting/cost_distribution_test.go +++ b/pkg/accounting/cost_distribution_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/assert" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) @@ -28,7 +29,7 @@ func TestStockManager(t *testing.T) { _, err = stockManager.AddTrades(trades) assert.NoError(t, err) - assert.Equal(t, 0.72970242, stockManager.Stocks.Quantity()) + assert.Equal(t, "0.72970242", stockManager.Stocks.Quantity().String()) assert.NotEmpty(t, stockManager.Stocks) assert.Equal(t, 20, len(stockManager.Stocks)) assert.Equal(t, 0, len(stockManager.PendingSells)) @@ -37,9 +38,9 @@ func 
TestStockManager(t *testing.T) { t.Run("stock", func(t *testing.T) { var trades = []types.Trade{ - {Symbol: "BTCUSDT", Price: 9100.0, Quantity: 0.05, IsBuyer: true}, - {Symbol: "BTCUSDT", Price: 9100.0, Quantity: 0.05, IsBuyer: true}, - {Symbol: "BTCUSDT", Price: 9200.0, Quantity: 0.01, IsBuyer: false}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9100.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9100.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9200.0"), Quantity: fixedpoint.MustNewFromString("0.01"), IsBuyer: false}, } var stockManager = &StockDistribution{ @@ -53,14 +54,14 @@ func TestStockManager(t *testing.T) { assert.Equal(t, StockSlice{ { Symbol: "BTCUSDT", - Price: 9100.0, - Quantity: 0.05, + Price: fixedpoint.MustNewFromString("9100.0"), + Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true, }, { Symbol: "BTCUSDT", - Price: 9100.0, - Quantity: 0.04, + Price: fixedpoint.MustNewFromString("9100.0"), + Quantity: fixedpoint.MustNewFromString("0.04"), IsBuyer: true, }, }, stockManager.Stocks) @@ -69,10 +70,10 @@ func TestStockManager(t *testing.T) { t.Run("sold out", func(t *testing.T) { var trades = []types.Trade{ - {Symbol: "BTCUSDT", Price: 9100.0, Quantity: 0.05, IsBuyer: true}, - {Symbol: "BTCUSDT", Price: 9200.0, Quantity: 0.05, IsBuyer: false}, - {Symbol: "BTCUSDT", Price: 9100.0, Quantity: 0.05, IsBuyer: true}, - {Symbol: "BTCUSDT", Price: 9200.0, Quantity: 0.05, IsBuyer: false}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9100.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9200.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: false}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9100.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9200.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: false}, } var stockManager = &StockDistribution{ @@ -88,9 +89,9 @@ func TestStockManager(t *testing.T) { t.Run("oversell", func(t *testing.T) { var trades = []types.Trade{ - {Symbol: "BTCUSDT", Price: 9100.0, Quantity: 0.05, IsBuyer: true}, - {Symbol: "BTCUSDT", Price: 9200.0, Quantity: 0.05, IsBuyer: false}, - {Symbol: "BTCUSDT", Price: 9200.0, Quantity: 0.05, IsBuyer: false}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9100.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9200.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: false}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9200.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: false}, } var stockManager = &StockDistribution{ @@ -106,9 +107,9 @@ func TestStockManager(t *testing.T) { t.Run("loss sell", func(t *testing.T) { var trades = []types.Trade{ - {Symbol: "BTCUSDT", Price: 9100.0, Quantity: 0.05, IsBuyer: true}, - {Symbol: "BTCUSDT", Price: 9200.0, Quantity: 0.02, IsBuyer: false}, - {Symbol: "BTCUSDT", Price: 8000.0, Quantity: 0.01, IsBuyer: false}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9100.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9200.0"), Quantity: fixedpoint.MustNewFromString("0.02"), IsBuyer: 
false}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("8000.0"), Quantity: fixedpoint.MustNewFromString("0.01"), IsBuyer: false}, } var stockManager = &StockDistribution{ @@ -122,8 +123,8 @@ func TestStockManager(t *testing.T) { assert.Equal(t, StockSlice{ { Symbol: "BTCUSDT", - Price: 9100.0, - Quantity: 0.02, + Price: fixedpoint.MustNewFromString("9100.0"), + Quantity: fixedpoint.MustNewFromString("0.02"), IsBuyer: true, }, }, stockManager.Stocks) @@ -132,8 +133,8 @@ func TestStockManager(t *testing.T) { t.Run("pending sell 1", func(t *testing.T) { var trades = []types.Trade{ - {Symbol: "BTCUSDT", Price: 9200.0, Quantity: 0.02}, - {Symbol: "BTCUSDT", Price: 9100.0, Quantity: 0.05, IsBuyer: true}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9200.0"), Quantity: fixedpoint.MustNewFromString("0.02")}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9100.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true}, } var stockManager = &StockDistribution{ @@ -147,8 +148,8 @@ func TestStockManager(t *testing.T) { assert.Equal(t, StockSlice{ { Symbol: "BTCUSDT", - Price: 9100.0, - Quantity: 0.03, + Price: fixedpoint.MustNewFromString("9100.0"), + Quantity: fixedpoint.MustNewFromString("0.03"), IsBuyer: true, }, }, stockManager.Stocks) @@ -157,8 +158,8 @@ func TestStockManager(t *testing.T) { t.Run("pending sell 2", func(t *testing.T) { var trades = []types.Trade{ - {Symbol: "BTCUSDT", Price: 9200.0, Quantity: 0.1}, - {Symbol: "BTCUSDT", Price: 9100.0, Quantity: 0.05, IsBuyer: true}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9200.0"), Quantity: fixedpoint.MustNewFromString("0.1")}, + {Symbol: "BTCUSDT", Price: fixedpoint.MustNewFromString("9100.0"), Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: true}, } var stockManager = &StockDistribution{ @@ -173,8 +174,8 @@ func TestStockManager(t *testing.T) { assert.Equal(t, StockSlice{ { Symbol: "BTCUSDT", - Price: 9200.0, - Quantity: 0.05, + Price: fixedpoint.MustNewFromString("9200.0"), + Quantity: fixedpoint.MustNewFromString("0.05"), IsBuyer: false, }, }, stockManager.PendingSells) diff --git a/pkg/accounting/pnl/avg_cost.go b/pkg/accounting/pnl/avg_cost.go index f05f49533f..23260bf3f1 100644 --- a/pkg/accounting/pnl/avg_cost.go +++ b/pkg/accounting/pnl/avg_cost.go @@ -1,111 +1,103 @@ package pnl import ( - "strings" + "time" + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) type AverageCostCalculator struct { TradingFeeCurrency string + Market types.Market } -func (c *AverageCostCalculator) Calculate(symbol string, trades []types.Trade, currentPrice float64) *AverageCostPnlReport { +func (c *AverageCostCalculator) Calculate(symbol string, trades []types.Trade, currentPrice fixedpoint.Value) *AverageCostPnlReport { // copy trades, so that we can truncate it. 
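+	// Usage sketch (illustrative; assumes a types.Market for the symbol and a list of trades
+	// loaded elsewhere, e.g. from the trade service):
+	//
+	//	calc := &AverageCostCalculator{TradingFeeCurrency: "BNB", Market: market}
+	//	report := calc.Calculate("BTCUSDT", trades, fixedpoint.NewFromFloat(30000.0))
+	//	report.Print()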
- var bidVolume = 0.0 - var bidAmount = 0.0 - - var askVolume = 0.0 - - var feeUSD = 0.0 - var bidFeeUSD = 0.0 - var feeRate = 0.0015 + var bidVolume = fixedpoint.Zero + var askVolume = fixedpoint.Zero + var feeUSD = fixedpoint.Zero if len(trades) == 0 { return &AverageCostPnlReport{ - Symbol: symbol, - CurrentPrice: currentPrice, - NumTrades: 0, - BuyVolume: bidVolume, - SellVolume: askVolume, - FeeInUSD: feeUSD, + Symbol: symbol, + Market: c.Market, + LastPrice: currentPrice, + NumTrades: 0, + BuyVolume: bidVolume, + SellVolume: askVolume, + FeeInUSD: feeUSD, } } - var currencyFees = map[string]float64{} - - for _, trade := range trades { - if trade.Symbol == symbol { - if trade.IsBuyer { - bidVolume += trade.Quantity - bidAmount += trade.Price * trade.Quantity - } - - // since we use USDT as the quote currency, we simply check if it matches the currency symbol - if strings.HasPrefix(trade.Symbol, trade.FeeCurrency) { - bidVolume -= trade.Fee - feeUSD += trade.Price * trade.Fee - if trade.IsBuyer { - bidFeeUSD += trade.Price * trade.Fee - } - } else if trade.FeeCurrency == "USDT" { - feeUSD += trade.Fee - if trade.IsBuyer { - bidFeeUSD += trade.Fee - } - } + var currencyFees = map[string]fixedpoint.Value{} - } else { - if trade.FeeCurrency == c.TradingFeeCurrency { - bidVolume -= trade.Fee - } - } + var position = types.NewPositionFromMarket(c.Market) + position.SetFeeRate(types.ExchangeFee{ + // binance vip 0 uses 0.075% + MakerFeeRate: fixedpoint.NewFromFloat(0.075 * 0.01), + TakerFeeRate: fixedpoint.NewFromFloat(0.075 * 0.01), + }) - if _, ok := currencyFees[trade.FeeCurrency]; !ok { - currencyFees[trade.FeeCurrency] = 0.0 - } - currencyFees[trade.FeeCurrency] += trade.Fee - } + // TODO: configure the exchange fee rate here later + // position.SetExchangeFeeRate() + var totalProfit fixedpoint.Value + var totalNetProfit fixedpoint.Value - profit := 0.0 - averageCost := (bidAmount + bidFeeUSD) / bidVolume + var tradeIDs = map[uint64]types.Trade{} - for _, t := range trades { - if t.Symbol != symbol { + for _, trade := range trades { + if _, exists := tradeIDs[trade.ID]; exists { + log.Warnf("duplicated trade: %+v", trade) continue } - if t.IsBuyer { + if trade.Symbol != symbol { continue } - profit += (t.Price - averageCost) * t.Quantity - askVolume += t.Quantity - } + profit, netProfit, madeProfit := position.AddTrade(trade) + if madeProfit { + totalProfit = totalProfit.Add(profit) + totalNetProfit = totalNetProfit.Add(netProfit) + } + + if trade.IsBuyer { + bidVolume = bidVolume.Add(trade.Quantity) + } else { + askVolume = askVolume.Add(trade.Quantity) + } - profit -= feeUSD - unrealizedProfit := profit + if _, ok := currencyFees[trade.FeeCurrency]; !ok { + currencyFees[trade.FeeCurrency] = trade.Fee + } else { + currencyFees[trade.FeeCurrency] = currencyFees[trade.FeeCurrency].Add(trade.Fee) + } - stock := bidVolume - askVolume - if stock > 0 { - stockFee := currentPrice * stock * feeRate - unrealizedProfit += (currentPrice-averageCost)*stock - stockFee + tradeIDs[trade.ID] = trade } + unrealizedProfit := currentPrice.Sub(position.AverageCost). 
+ Mul(position.GetBase()) + return &AverageCostPnlReport{ - Symbol: symbol, - CurrentPrice: currentPrice, - NumTrades: len(trades), - StartTime: trades[0].Time, + Symbol: symbol, + Market: c.Market, + LastPrice: currentPrice, + NumTrades: len(trades), + StartTime: time.Time(trades[0].Time), BuyVolume: bidVolume, SellVolume: askVolume, - Stock: stock, - Profit: profit, - UnrealizedProfit: unrealizedProfit, - AverageBidCost: averageCost, - FeeInUSD: feeUSD, - CurrencyFees: currencyFees, + BaseAssetPosition: position.GetBase(), + Profit: totalProfit, + NetProfit: totalNetProfit, + UnrealizedProfit: unrealizedProfit, + AverageCost: position.AverageCost, + FeeInUSD: totalProfit.Sub(totalNetProfit), + CurrencyFees: currencyFees, } } diff --git a/pkg/accounting/pnl/report.go b/pkg/accounting/pnl/report.go index ad1387c588..1dcd19f109 100644 --- a/pkg/accounting/pnl/report.go +++ b/pkg/accounting/pnl/report.go @@ -1,54 +1,74 @@ package pnl import ( + "encoding/json" "strconv" "time" - log "github.com/sirupsen/logrus" + "github.com/fatih/color" "github.com/slack-go/slack" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/slack/slackstyle" "github.com/c9s/bbgo/pkg/types" ) type AverageCostPnlReport struct { - CurrentPrice float64 - StartTime time.Time - Symbol string - Market types.Market - - NumTrades int - Profit float64 - UnrealizedProfit float64 - AverageBidCost float64 - BuyVolume float64 - SellVolume float64 - FeeInUSD float64 - Stock float64 - CurrencyFees map[string]float64 + LastPrice fixedpoint.Value `json:"lastPrice"` + StartTime time.Time `json:"startTime"` + Symbol string `json:"symbol"` + Market types.Market `json:"market"` + + NumTrades int `json:"numTrades"` + Profit fixedpoint.Value `json:"profit"` + NetProfit fixedpoint.Value `json:"netProfit"` + UnrealizedProfit fixedpoint.Value `json:"unrealizedProfit"` + AverageCost fixedpoint.Value `json:"averageCost"` + BuyVolume fixedpoint.Value `json:"buyVolume,omitempty"` + SellVolume fixedpoint.Value `json:"sellVolume,omitempty"` + FeeInUSD fixedpoint.Value `json:"feeInUSD"` + BaseAssetPosition fixedpoint.Value `json:"baseAssetPosition"` + CurrencyFees map[string]fixedpoint.Value `json:"currencyFees"` +} + +func (report *AverageCostPnlReport) JSON() ([]byte, error) { + return json.MarshalIndent(report, "", " ") } func (report AverageCostPnlReport) Print() { - log.Infof("TRADES SINCE: %v", report.StartTime) - log.Infof("NUMBER OF TRADES: %d", report.NumTrades) - log.Infof("AVERAGE COST: %s", types.USD.FormatMoneyFloat64(report.AverageBidCost)) - log.Infof("TOTAL BUY VOLUME: %f", report.BuyVolume) - log.Infof("TOTAL SELL VOLUME: %f", report.SellVolume) - log.Infof("STOCK: %f", report.Stock) - log.Infof("FEE (USD): %f", report.FeeInUSD) - log.Infof("CURRENT PRICE: %s", types.USD.FormatMoneyFloat64(report.CurrentPrice)) - log.Infof("CURRENCY FEES:") + color.Green("TRADES SINCE: %v", report.StartTime) + color.Green("NUMBER OF TRADES: %d", report.NumTrades) + + color.Green("AVERAGE COST: %s", types.USD.FormatMoney(report.AverageCost)) + color.Green("BASE ASSET POSITION: %s", report.BaseAssetPosition.String()) + + color.Green("TOTAL BUY VOLUME: %v", report.BuyVolume) + color.Green("TOTAL SELL VOLUME: %v", report.SellVolume) + + color.Green("CURRENT PRICE: %s", types.USD.FormatMoney(report.LastPrice)) + color.Green("CURRENCY FEES:") for currency, fee := range report.CurrencyFees { - log.Infof(" - %s: %f", currency, fee) + color.Green(" - %s: %s", currency, fee.String()) + } + + if report.Profit.Sign() > 0 { + color.Green("PROFIT: 
%s", types.USD.FormatMoney(report.Profit)) + } else { + color.Red("PROFIT: %s", types.USD.FormatMoney(report.Profit)) + } + + if report.UnrealizedProfit.Sign() > 0 { + color.Green("UNREALIZED PROFIT: %s", types.USD.FormatMoney(report.UnrealizedProfit)) + } else { + color.Red("UNREALIZED PROFIT: %s", types.USD.FormatMoney(report.UnrealizedProfit)) } - log.Infof("PROFIT: %s", types.USD.FormatMoneyFloat64(report.Profit)) - log.Infof("UNREALIZED PROFIT: %s", types.USD.FormatMoneyFloat64(report.UnrealizedProfit)) } func (report AverageCostPnlReport) SlackAttachment() slack.Attachment { var color = slackstyle.Red - if report.UnrealizedProfit > 0 { + if report.UnrealizedProfit.Sign() > 0 { color = slackstyle.Green } @@ -61,10 +81,12 @@ func (report AverageCostPnlReport) SlackAttachment() slack.Attachment { Fields: []slack.AttachmentField{ {Title: "Profit", Value: types.USD.FormatMoney(report.Profit)}, {Title: "Unrealized Profit", Value: types.USD.FormatMoney(report.UnrealizedProfit)}, - {Title: "Current Price", Value: report.Market.FormatPrice(report.CurrentPrice), Short: true}, - {Title: "Average Cost", Value: report.Market.FormatPrice(report.AverageBidCost), Short: true}, - {Title: "Fee (USD)", Value: types.USD.FormatMoney(report.FeeInUSD), Short: true}, - {Title: "Stock", Value: strconv.FormatFloat(report.Stock, 'f', 8, 64), Short: true}, + {Title: "Current Price", Value: report.Market.FormatPrice(report.LastPrice), Short: true}, + {Title: "Average Cost", Value: report.Market.FormatPrice(report.AverageCost), Short: true}, + + // FIXME: + // {Title: "Fee (USD)", Value: types.USD.FormatMoney(report.FeeInUSD), Short: true}, + {Title: "Base Asset Position", Value: report.BaseAssetPosition.String(), Short: true}, {Title: "Number of Trades", Value: strconv.Itoa(report.NumTrades), Short: true}, }, Footer: report.StartTime.Format(time.RFC822), diff --git a/pkg/backtest/assets_dummy.go b/pkg/backtest/assets_dummy.go new file mode 100644 index 0000000000..a6f0fcb376 --- /dev/null +++ b/pkg/backtest/assets_dummy.go @@ -0,0 +1,65 @@ +//go:build !web +// +build !web + +package backtest + +import ( + "bytes" + "errors" + "net/http" + "os" + "time" +) + +var assets = map[string][]byte{} + +var FS = &fs{} + +type fs struct{} + +func (fs *fs) Open(name string) (http.File, error) { + if name == "/" { + return fs, nil + } + b, ok := assets[name] + if !ok { + return nil, os.ErrNotExist + } + return &file{name: name, size: len(b), Reader: bytes.NewReader(b)}, nil +} + +func (fs *fs) Close() error { return nil } +func (fs *fs) Read(p []byte) (int, error) { return 0, nil } +func (fs *fs) Seek(offset int64, whence int) (int64, error) { return 0, nil } +func (fs *fs) Stat() (os.FileInfo, error) { return fs, nil } +func (fs *fs) Name() string { return "/" } +func (fs *fs) Size() int64 { return 0 } +func (fs *fs) Mode() os.FileMode { return 0755 } +func (fs *fs) ModTime() time.Time { return time.Time{} } +func (fs *fs) IsDir() bool { return true } +func (fs *fs) Sys() interface{} { return nil } +func (fs *fs) Readdir(count int) ([]os.FileInfo, error) { + files := []os.FileInfo{} + for name, data := range assets { + files = append(files, &file{name: name, size: len(data), Reader: bytes.NewReader(data)}) + } + return files, nil +} + +type file struct { + name string + size int + *bytes.Reader +} + +func (f *file) Close() error { return nil } +func (f *file) Readdir(count int) ([]os.FileInfo, error) { + return nil, errors.New("readdir is not supported") +} +func (f *file) Stat() (os.FileInfo, error) { return f, nil } +func 
(f *file) Name() string { return f.name } +func (f *file) Size() int64 { return int64(f.size) } +func (f *file) Mode() os.FileMode { return 0644 } +func (f *file) ModTime() time.Time { return time.Time{} } +func (f *file) IsDir() bool { return false } +func (f *file) Sys() interface{} { return nil } diff --git a/pkg/backtest/dumper.go b/pkg/backtest/dumper.go new file mode 100644 index 0000000000..ba2139384b --- /dev/null +++ b/pkg/backtest/dumper.go @@ -0,0 +1,97 @@ +package backtest + +import ( + "fmt" + "path/filepath" + "strconv" + "time" + + "go.uber.org/multierr" + + "github.com/c9s/bbgo/pkg/data/tsv" + "github.com/c9s/bbgo/pkg/types" +) + +const DateFormat = "2006-01-02T15:04" + +type symbolInterval struct { + Symbol string + Interval types.Interval +} + +// KLineDumper dumps the received kline data into a folder for the backtest report to load the charts. +type KLineDumper struct { + OutputDirectory string + writers map[symbolInterval]*tsv.Writer + filenames map[symbolInterval]string +} + +func NewKLineDumper(outputDirectory string) *KLineDumper { + return &KLineDumper{ + OutputDirectory: outputDirectory, + writers: make(map[symbolInterval]*tsv.Writer), + filenames: make(map[symbolInterval]string), + } +} + +func (d *KLineDumper) Filenames() map[symbolInterval]string { + return d.filenames +} + +func (d *KLineDumper) formatFileName(symbol string, interval types.Interval) string { + return filepath.Join(d.OutputDirectory, fmt.Sprintf("%s-%s.tsv", + symbol, + interval)) +} + +var csvHeader = []string{"date", "startTime", "endTime", "interval", "open", "high", "low", "close", "volume"} + +func (d *KLineDumper) encode(k types.KLine) []string { + return []string{ + time.Time(k.StartTime).Format(time.ANSIC), // ANSIC date - for javascript to parse (this works with Date.parse(date_str) + strconv.FormatInt(k.StartTime.Unix(), 10), + strconv.FormatInt(k.EndTime.Unix(), 10), + k.Interval.String(), + k.Open.String(), + k.High.String(), + k.Low.String(), + k.Close.String(), + k.Volume.String(), + } +} + +func (d *KLineDumper) Record(k types.KLine) error { + si := symbolInterval{Symbol: k.Symbol, Interval: k.Interval} + + w, ok := d.writers[si] + if !ok { + filename := d.formatFileName(k.Symbol, k.Interval) + w2, err := tsv.NewWriterFile(filename) + if err != nil { + return err + } + w = w2 + + d.writers[si] = w2 + d.filenames[si] = filename + + if err2 := w2.Write(csvHeader); err2 != nil { + return err2 + } + } + + return w.Write(d.encode(k)) +} + +func (d *KLineDumper) Close() error { + var err error = nil + for _, w := range d.writers { + w.Flush() + err2 := w.Close() + if err2 != nil { + err = multierr.Append(err, err2) + } + } + + return err +} diff --git a/pkg/backtest/dumper_test.go b/pkg/backtest/dumper_test.go new file mode 100644 index 0000000000..be13a22611 --- /dev/null +++ b/pkg/backtest/dumper_test.go @@ -0,0 +1,54 @@ +package backtest + +import ( + "encoding/csv" + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func TestKLineDumper(t *testing.T) { + tempDir := os.TempDir() + _ = os.Mkdir(tempDir, 0755) + dumper := NewKLineDumper(tempDir) + + t1 := time.Now() + err := dumper.Record(types.KLine{ + Exchange: types.ExchangeBinance, + Symbol: "BTCUSDT", + StartTime: types.Time(t1), + EndTime: types.Time(t1.Add(time.Minute)), + Interval: types.Interval1m, + Open: fixedpoint.NewFromFloat(1000.0), + High: fixedpoint.NewFromFloat(2000.0), + Low: fixedpoint.NewFromFloat(3000.0), + Close: 
fixedpoint.NewFromFloat(4000.0), + Volume: fixedpoint.NewFromFloat(5000.0), + QuoteVolume: fixedpoint.NewFromFloat(6000.0), + NumberOfTrades: 10, + Closed: true, + }) + assert.NoError(t, err) + + err = dumper.Close() + assert.NoError(t, err) + + filenames := dumper.Filenames() + assert.NotEmpty(t, filenames) + for _, filename := range filenames { + f, err := os.Open(filename) + if assert.NoError(t, err) { + reader := csv.NewReader(f) + records, err2 := reader.Read() + if assert.NoError(t, err2) { + assert.NotEmptyf(t, records, "%v", records) + } + + } + } +}
diff --git a/pkg/backtest/exchange.go b/pkg/backtest/exchange.go index 82a5381c16..3badc55e2b 100644 --- a/pkg/backtest/exchange.go +++ b/pkg/backtest/exchange.go @@ -1,19 +1,53 @@ +/* +The backtest process + +The backtest engine loads the klines from the database into a kline channel, +and multiple matching engines (one per symbol) match the orders sent from the strategies. + +For each kline, the backtest engine: + +1) loads the kline and runs the matching logic, sending order updates and trades to the user data stream. +2) once the matching process for the kline is done, pushes the kline to the market data stream. +3) goes back to 1 and loads the next kline. + +There are 2 ways a strategy can work with the backtest engine: + +1. the strategy receives a kline from the market data stream and submits an order to the backtest engine based on that market data. + the backtest engine receives the order and then pushes the trade and order updates to the user data stream. + + the strategy then receives the trade and updates its position. + +2. the strategy places its orders when it starts (like grid), then receives order updates and submits new orders + from its order update handler. + +We need to ensure that: + +1. if the strategy submits orders from the market data stream, which runs in a separate goroutine, the strategy must block the backtest engine + from processing the next kline until the trades have been handled.
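+
+For illustration only (not part of this change), pattern 1 above might look roughly like the
+following in a strategy. The sketch assumes the usual bbgo stream callback OnKLineClosed and
+the SubmitOrders method defined below; stream, exchange, and ctx are assumed to be wired up
+by the environment:
+
+	stream.OnKLineClosed(func(k types.KLine) {
+		// decide based on the kline, then place an order through the backtest exchange
+		_, err := exchange.SubmitOrders(ctx, types.SubmitOrder{
+			Symbol:   k.Symbol,
+			Side:     types.SideTypeBuy,
+			Type:     types.OrderTypeMarket,
+			Quantity: fixedpoint.NewFromFloat(0.01),
+		})
+		if err != nil {
+			log.WithError(err).Error("submit order error")
+		}
+	})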
+*/ package backtest import ( "context" "fmt" + "sync" "time" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/cache" + "github.com/pkg/errors" "github.com/c9s/bbgo/pkg/bbgo" - "github.com/c9s/bbgo/pkg/exchange/binance" - "github.com/c9s/bbgo/pkg/exchange/max" "github.com/c9s/bbgo/pkg/service" "github.com/c9s/bbgo/pkg/types" ) +var log = logrus.WithField("cmd", "backtest") + +var ErrUnimplemented = errors.New("unimplemented method") + type Exchange struct { sourceName types.ExchangeName publicExchange types.Exchange @@ -23,47 +57,45 @@ type Exchange struct { account *types.Account config *bbgo.Backtest - stream *Stream + UserDataStream, MarketDataStream types.StandardStreamEmitter - trades map[string][]types.Trade - closedOrders map[string][]types.Order - matchingBooks map[string]*SimplePriceMatching - markets types.MarketMap - doneC chan struct{} -} + trades map[string][]types.Trade + tradesMutex sync.Mutex -func NewExchange(sourceName types.ExchangeName, srv *service.BacktestService, config *bbgo.Backtest) *Exchange { - ex, err := newPublicExchange(sourceName) - if err != nil { - panic(err) - } + closedOrders map[string][]types.Order + closedOrdersMutex sync.Mutex - if config == nil { - panic(errors.New("backtest config can not be nil")) - } + matchingBooks map[string]*SimplePriceMatching + matchingBooksMutex sync.Mutex - markets, err := bbgo.LoadExchangeMarketsWithCache(context.Background(), ex) - if err != nil { - panic(err) - } + markets types.MarketMap +} - startTime, err := config.ParseStartTime() +func NewExchange(sourceName types.ExchangeName, sourceExchange types.Exchange, srv *service.BacktestService, config *bbgo.Backtest) (*Exchange, error) { + ex := sourceExchange + + markets, err := cache.LoadExchangeMarketsWithCache(context.Background(), ex) if err != nil { - panic(err) + return nil, err } - endTime, err := config.ParseEndTime() - if err != nil { - panic(err) + var startTime, endTime time.Time + startTime = config.StartTime.Time() + if config.EndTime != nil { + endTime = config.EndTime.Time() + } else { + endTime = time.Now() } + configAccount := config.GetAccount(sourceName.String()) + account := &types.Account{ - MakerCommission: config.Account.MakerCommission, - TakerCommission: config.Account.TakerCommission, - AccountType: "SPOT", // currently not used + MakerFeeRate: configAccount.MakerFeeRate, + TakerFeeRate: configAccount.TakerFeeRate, + AccountType: types.AccountTypeSpot, } - balances := config.Account.Balances.BalanceMap() + balances := configAccount.Balances.BalanceMap() account.UpdateBalances(balances) e := &Exchange{ @@ -75,56 +107,67 @@ func NewExchange(sourceName types.ExchangeName, srv *service.BacktestService, co account: account, startTime: startTime, endTime: endTime, - matchingBooks: make(map[string]*SimplePriceMatching), closedOrders: make(map[string][]types.Order), trades: make(map[string][]types.Trade), - doneC: make(chan struct{}), } - return e + e.resetMatchingBooks() + return e, nil } -func (e *Exchange) Done() chan struct{} { - return e.doneC +func (e *Exchange) addTrade(trade types.Trade) { + e.tradesMutex.Lock() + e.trades[trade.Symbol] = append(e.trades[trade.Symbol], trade) + e.tradesMutex.Unlock() } -func (e *Exchange) NewStream() types.Stream { - if e.stream != nil { - panic("backtest stream can not be allocated twice") - } +func (e *Exchange) addClosedOrder(order types.Order) { + e.closedOrdersMutex.Lock() + e.closedOrders[order.Symbol] = append(e.closedOrders[order.Symbol], order) + e.closedOrdersMutex.Unlock() +} - e.stream = 
&Stream{exchange: e} +func (e *Exchange) resetMatchingBooks() { + e.matchingBooksMutex.Lock() + e.matchingBooks = make(map[string]*SimplePriceMatching) + for symbol, market := range e.markets { + e._addMatchingBook(symbol, market) + } + e.matchingBooksMutex.Unlock() +} - e.stream.OnTradeUpdate(func(trade types.Trade) { - e.trades[trade.Symbol] = append(e.trades[trade.Symbol], trade) - }) +func (e *Exchange) addMatchingBook(symbol string, market types.Market) { + e.matchingBooksMutex.Lock() + e._addMatchingBook(symbol, market) + e.matchingBooksMutex.Unlock() +} - for symbol, market := range e.markets { - matching := &SimplePriceMatching{ - CurrentTime: e.startTime, - Account: e.account, - Market: market, - MakerCommission: e.config.Account.MakerCommission, - TakerCommission: e.config.Account.TakerCommission, - } - matching.OnTradeUpdate(e.stream.EmitTradeUpdate) - matching.OnOrderUpdate(e.stream.EmitOrderUpdate) - matching.OnBalanceUpdate(e.stream.EmitBalanceUpdate) - e.matchingBooks[symbol] = matching +func (e *Exchange) _addMatchingBook(symbol string, market types.Market) { + e.matchingBooks[symbol] = &SimplePriceMatching{ + CurrentTime: e.startTime, + Account: e.account, + Market: market, } +} - return e.stream +func (e *Exchange) NewStream() types.Stream { + return &types.BacktestStream{ + StandardStreamEmitter: &types.StandardStream{}, + } } -func (e Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (createdOrders types.OrderSlice, err error) { +func (e *Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (createdOrders types.OrderSlice, err error) { + if e.UserDataStream == nil { + return createdOrders, fmt.Errorf("SubmitOrders should be called after UserDataStream been initialized") + } for _, order := range orders { symbol := order.Symbol - matching, ok := e.matchingBooks[symbol] + matching, ok := e.matchingBook(symbol) if !ok { return nil, fmt.Errorf("matching engine is not initialized for symbol %s", symbol) } - createdOrder, trade, err := matching.PlaceOrder(order) + createdOrder, _, err := matching.PlaceOrder(order) if err != nil { return nil, err } @@ -135,22 +178,18 @@ func (e Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) // market order can be closed immediately. 
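	// Terminal statuses (filled, canceled, rejected) go into the closed-order list,
	// and every created order is announced on the user data stream, so the strategy
	// sees the same order-update flow it would get from a live session.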
switch createdOrder.Status { case types.OrderStatusFilled, types.OrderStatusCanceled, types.OrderStatusRejected: - e.closedOrders[symbol] = append(e.closedOrders[symbol], *createdOrder) + e.addClosedOrder(*createdOrder) } - e.stream.EmitOrderUpdate(*createdOrder) - } - - if trade != nil { - e.stream.EmitTradeUpdate(*trade) + e.UserDataStream.EmitOrderUpdate(*createdOrder) } } return createdOrders, nil } -func (e Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders []types.Order, err error) { - matching, ok := e.matchingBooks[symbol] +func (e *Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders []types.Order, err error) { + matching, ok := e.matchingBook(symbol) if !ok { return nil, fmt.Errorf("matching engine is not initialized for symbol %s", symbol) } @@ -158,7 +197,7 @@ func (e Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders [] return append(matching.bidOrders, matching.askOrders...), nil } -func (e Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) (orders []types.Order, err error) { +func (e *Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) (orders []types.Order, err error) { orders, ok := e.closedOrders[symbol] if !ok { return orders, fmt.Errorf("matching engine is not initialized for symbol %s", symbol) @@ -167,9 +206,12 @@ func (e Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, u return orders, nil } -func (e Exchange) CancelOrders(ctx context.Context, orders ...types.Order) error { +func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) error { + if e.UserDataStream == nil { + return fmt.Errorf("CancelOrders should be called after UserDataStream been initialized") + } for _, order := range orders { - matching, ok := e.matchingBooks[order.Symbol] + matching, ok := e.matchingBook(order.Symbol) if !ok { return fmt.Errorf("matching engine is not initialized for symbol %s", order.Symbol) } @@ -178,13 +220,13 @@ func (e Exchange) CancelOrders(ctx context.Context, orders ...types.Order) error return err } - e.stream.EmitOrderUpdate(canceledOrder) + e.UserDataStream.EmitOrderUpdate(canceledOrder) } return nil } -func (e Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { +func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { return e.account, nil } @@ -192,49 +234,155 @@ func (e *Exchange) QueryAccountBalances(ctx context.Context) (types.BalanceMap, return e.account.Balances(), nil } -func (e Exchange) QueryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { +func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { if options.EndTime != nil { - return e.srv.QueryKLinesBackward(e.sourceName, symbol, interval, *options.EndTime) + return e.srv.QueryKLinesBackward(e.sourceName, symbol, interval, *options.EndTime, 1000) } + if options.StartTime != nil { - return e.srv.QueryKLinesForward(e.sourceName, symbol, interval, *options.StartTime) + return e.srv.QueryKLinesForward(e.sourceName, symbol, interval, *options.StartTime, 1000) } return nil, errors.New("endTime or startTime can not be nil") } -func (e Exchange) QueryTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) ([]types.Trade, error) { +func (e *Exchange) QueryTrades(ctx context.Context, symbol 
string, options *types.TradeQueryOptions) ([]types.Trade, error) { // we don't need query trades for backtest return nil, nil } -func (e Exchange) Name() types.ExchangeName { +func (e *Exchange) QueryTicker(ctx context.Context, symbol string) (*types.Ticker, error) { + matching, ok := e.matchingBook(symbol) + if !ok { + return nil, fmt.Errorf("matching engine is not initialized for symbol %s", symbol) + } + + kline := matching.LastKLine + return &types.Ticker{ + Time: kline.EndTime.Time(), + Volume: kline.Volume, + Last: kline.Close, + Open: kline.Open, + High: kline.High, + Low: kline.Low, + Buy: kline.Close, + Sell: kline.Close, + }, nil +} + +func (e *Exchange) QueryTickers(ctx context.Context, symbol ...string) (map[string]types.Ticker, error) { + // Not using Tickers in back test (yet) + return nil, ErrUnimplemented +} + +func (e *Exchange) Name() types.ExchangeName { return e.publicExchange.Name() } -func (e Exchange) PlatformFeeCurrency() string { +func (e *Exchange) PlatformFeeCurrency() string { return e.publicExchange.PlatformFeeCurrency() } -func (e Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { - return e.publicExchange.QueryMarkets(ctx) +func (e *Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { + return e.markets, nil } -func (e Exchange) QueryDepositHistory(ctx context.Context, asset string, since, until time.Time) (allDeposits []types.Deposit, err error) { +func (e *Exchange) QueryDepositHistory(ctx context.Context, asset string, since, until time.Time) (allDeposits []types.Deposit, err error) { return nil, nil } -func (e Exchange) QueryWithdrawHistory(ctx context.Context, asset string, since, until time.Time) (allWithdraws []types.Withdraw, err error) { +func (e *Exchange) QueryWithdrawHistory(ctx context.Context, asset string, since, until time.Time) (allWithdraws []types.Withdraw, err error) { return nil, nil } -func newPublicExchange(sourceExchange types.ExchangeName) (types.Exchange, error) { - switch sourceExchange { - case types.ExchangeBinance: - return binance.New("", ""), nil - case types.ExchangeMax: - return max.New("", ""), nil +func (e *Exchange) matchingBook(symbol string) (*SimplePriceMatching, bool) { + e.matchingBooksMutex.Lock() + m, ok := e.matchingBooks[symbol] + e.matchingBooksMutex.Unlock() + return m, ok +} + +func (e *Exchange) InitMarketData() { + e.UserDataStream.OnTradeUpdate(func(trade types.Trade) { + e.addTrade(trade) + }) + + e.matchingBooksMutex.Lock() + for _, matching := range e.matchingBooks { + matching.OnTradeUpdate(e.UserDataStream.EmitTradeUpdate) + matching.OnOrderUpdate(e.UserDataStream.EmitOrderUpdate) + matching.OnBalanceUpdate(e.UserDataStream.EmitBalanceUpdate) + } + e.matchingBooksMutex.Unlock() +} + +func (e *Exchange) SubscribeMarketData(extraIntervals ...types.Interval) (chan types.KLine, error) { + log.Infof("collecting backtest configurations...") + + loadedSymbols := map[string]struct{}{} + loadedIntervals := map[types.Interval]struct{}{ + // 1m interval is required for the backtest matching engine + types.Interval1m: {}, + } + + for _, it := range extraIntervals { + loadedIntervals[it] = struct{}{} + } + + // collect subscriptions + for _, sub := range e.MarketDataStream.GetSubscriptions() { + loadedSymbols[sub.Symbol] = struct{}{} + + switch sub.Channel { + case types.KLineChannel: + loadedIntervals[sub.Options.Interval] = struct{}{} + + default: + // Since Environment is not yet been injected at this point, no hard error + log.Errorf("stream channel %s is not supported in 
backtest", sub.Channel) + } + } + + var symbols []string + for symbol := range loadedSymbols { + symbols = append(symbols, symbol) + } + + var intervals []types.Interval + for interval := range loadedIntervals { + intervals = append(intervals, interval) + } + + log.Infof("using symbols: %v and intervals: %v for back-testing", symbols, intervals) + log.Infof("querying klines from database...") + klineC, errC := e.srv.QueryKLinesCh(e.startTime, e.endTime, e, symbols, intervals) + go func() { + if err := <-errC; err != nil { + log.WithError(err).Error("backtest data feed error") + } + }() + return klineC, nil +} + +func (e *Exchange) ConsumeKLine(k types.KLine) { + if k.Interval == types.Interval1m { + matching, ok := e.matchingBook(k.Symbol) + if !ok { + log.Errorf("matching book of %s is not initialized", k.Symbol) + return + } + + // here we generate trades and order updates + matching.processKLine(k) } - return nil, fmt.Errorf("exchange %s is not supported", sourceExchange) + e.MarketDataStream.EmitKLineClosed(k) +} + +func (e *Exchange) CloseMarketData() error { + if err := e.MarketDataStream.Close(); err != nil { + log.WithError(err).Error("stream close error") + return err + } + return nil } diff --git a/pkg/backtest/exchange_klinec.go b/pkg/backtest/exchange_klinec.go new file mode 100644 index 0000000000..92a5269c27 --- /dev/null +++ b/pkg/backtest/exchange_klinec.go @@ -0,0 +1,12 @@ +package backtest + +import ( + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +type ExchangeDataSource struct { + C chan types.KLine + Exchange *Exchange + Session *bbgo.ExchangeSession +} diff --git a/pkg/backtest/manifests.go b/pkg/backtest/manifests.go new file mode 100644 index 0000000000..c457e91f5e --- /dev/null +++ b/pkg/backtest/manifests.go @@ -0,0 +1,47 @@ +package backtest + +import "encoding/json" + +type ManifestEntry struct { + Type string `json:"type"` + Filename string `json:"filename"` + StrategyID string `json:"strategyID"` + StrategyInstance string `json:"strategyInstance"` + StrategyProperty string `json:"strategyProperty"` +} + +type Manifests map[InstancePropertyIndex]string + +func (m *Manifests) UnmarshalJSON(j []byte) error { + var entries []ManifestEntry + if err := json.Unmarshal(j, &entries); err != nil { + return err + } + + mm := make(Manifests) + for _, entry := range entries { + index := InstancePropertyIndex{ + ID: entry.StrategyID, + InstanceID: entry.StrategyInstance, + Property: entry.StrategyProperty, + } + mm[index] = entry.Filename + } + *m = mm + return nil +} + +func (m Manifests) MarshalJSON() ([]byte, error) { + var arr []ManifestEntry + for k, v := range m { + arr = append(arr, ManifestEntry{ + Type: "strategyProperty", + Filename: v, + StrategyID: k.ID, + StrategyInstance: k.InstanceID, + StrategyProperty: k.Property, + }) + + } + return json.MarshalIndent(arr, "", " ") +} diff --git a/pkg/backtest/matching.go b/pkg/backtest/matching.go index 24b687a567..9ea4c64df9 100644 --- a/pkg/backtest/matching.go +++ b/pkg/backtest/matching.go @@ -11,14 +11,9 @@ import ( "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" ) -// DefaultFeeRate set the fee rate for most cases -// BINANCE uses 0.1% for both maker and taker -// for BNB holders, it's 0.075% for both maker and taker -// MAX uses 0.050% for maker and 0.15% for taker -const DefaultFeeRate = 0.15 * 0.001 - var orderID uint64 = 1 var tradeID uint64 = 1 @@ -30,24 +25,45 @@ func incTradeID() uint64 { return atomic.AddUint64(&tradeID, 1) } +var 
klineMatchingLogger *logrus.Entry = nil + +// FeeToken is used to simulate the exchange platform fee token +// This is to ease the back-testing environment for closing positions. +const FeeToken = "FEE" + +var useFeeToken = true + +func init() { + logger := logrus.New() + if v, ok := util.GetEnvVarBool("DEBUG_MATCHING"); ok && v { + logger.SetLevel(logrus.DebugLevel) + } else { + logger.SetLevel(logrus.ErrorLevel) + } + klineMatchingLogger = logger.WithField("backtest", "klineEngine") + + if v, ok := util.GetEnvVarBool("BACKTEST_USE_FEE_TOKEN"); ok { + useFeeToken = v + } +} + // SimplePriceMatching implements a simple kline data driven matching engine for backtest //go:generate callbackgen -type SimplePriceMatching type SimplePriceMatching struct { Symbol string Market types.Market - mu sync.Mutex - bidOrders []types.Order - askOrders []types.Order + mu sync.Mutex + bidOrders []types.Order + askOrders []types.Order + closedOrders []types.Order LastPrice fixedpoint.Value + LastKLine types.KLine CurrentTime time.Time Account *types.Account - MakerCommission int `json:"makerCommission"` - TakerCommission int `json:"takerCommission"` - tradeUpdateCallbacks []func(trade types.Trade) orderUpdateCallbacks []func(order types.Order) balanceUpdateCallbacks []func(balances types.BalanceMap) @@ -87,19 +103,17 @@ func (m *SimplePriceMatching) CancelOrder(o types.Order) (types.Order, error) { } if !found { - logrus.Panicf("cancel order failed, order %d not found: %+v", o.OrderID, o) - return o, fmt.Errorf("cancel order failed, order %d not found: %+v", o.OrderID, o) } switch o.Side { case types.SideTypeBuy: - if err := m.Account.UnlockBalance(m.Market.QuoteCurrency, fixedpoint.NewFromFloat(o.Price*o.Quantity)); err != nil { + if err := m.Account.UnlockBalance(m.Market.QuoteCurrency, o.Price.Mul(o.Quantity)); err != nil { return o, err } case types.SideTypeSell: - if err := m.Account.UnlockBalance(m.Market.BaseCurrency, fixedpoint.NewFromFloat(o.Quantity)); err != nil { + if err := m.Account.UnlockBalance(m.Market.BaseCurrency, o.Quantity); err != nil { return o, err } } @@ -111,26 +125,37 @@ func (m *SimplePriceMatching) CancelOrder(o types.Order) (types.Order, error) { } func (m *SimplePriceMatching) PlaceOrder(o types.SubmitOrder) (closedOrders *types.Order, trades *types.Trade, err error) { - - // price for checking account balance + // price for checking account balance, default price price := o.Price + switch o.Type { case types.OrderTypeMarket: - price = m.LastPrice.Float64() - case types.OrderTypeLimit: + if m.LastPrice.IsZero() { + panic("unexpected: last price can not be zero") + } + + price = m.LastPrice + case types.OrderTypeLimit, types.OrderTypeLimitMaker: price = o.Price } + if o.Quantity.Compare(m.Market.MinQuantity) < 0 { + return nil, nil, fmt.Errorf("order quantity %s is less than minQuantity %s, order: %+v", o.Quantity.String(), m.Market.MinQuantity.String(), o) + } + + quoteQuantity := o.Quantity.Mul(price) + if quoteQuantity.Compare(m.Market.MinNotional) < 0 { + return nil, nil, fmt.Errorf("order amount %s is less than minNotional %s, order: %+v", quoteQuantity.String(), m.Market.MinNotional.String(), o) + } + switch o.Side { case types.SideTypeBuy: - quote := price * o.Quantity - if err := m.Account.LockBalance(m.Market.QuoteCurrency, fixedpoint.NewFromFloat(quote)); err != nil { + if err := m.Account.LockBalance(m.Market.QuoteCurrency, quoteQuantity); err != nil { return nil, nil, err } case types.SideTypeSell: - baseQuantity := o.Quantity - if err := 
m.Account.LockBalance(m.Market.BaseCurrency, fixedpoint.NewFromFloat(baseQuantity)); err != nil { + if err := m.Account.LockBalance(m.Market.BaseCurrency, o.Quantity); err != nil { return nil, nil, err } } @@ -142,22 +167,29 @@ func (m *SimplePriceMatching) PlaceOrder(o types.SubmitOrder) (closedOrders *typ order := m.newOrder(o, orderID) if o.Type == types.OrderTypeMarket { + // emit the order update for Status:New m.EmitOrderUpdate(order) + // copy the order object to avoid side effect (for different callbacks) + var order2 = order + // emit trade before we publish order - trade := m.newTradeFromOrder(order, false) + trade := m.newTradeFromOrder(&order2, false) m.executeTrade(trade) // update the order status - order.Status = types.OrderStatusFilled - order.ExecutedQuantity = order.Quantity - order.Price = price - m.EmitOrderUpdate(order) - m.EmitBalanceUpdate(m.Account.Balances()) - return &order, &trade, nil + order2.Status = types.OrderStatusFilled + order2.ExecutedQuantity = order2.Quantity + order2.Price = price + order2.IsWorking = false + + // let the exchange emit the "FILLED" order update (we need the closed order) + // m.EmitOrderUpdate(order2) + return &order2, &trade, nil } - // for limit maker orders + // For limit maker orders (open status) + // TODO: handle limit taker order switch o.Side { case types.SideTypeBuy: @@ -171,8 +203,7 @@ func (m *SimplePriceMatching) PlaceOrder(o types.SubmitOrder) (closedOrders *typ m.mu.Unlock() } - m.EmitOrderUpdate(order) - + m.EmitOrderUpdate(order) // emit order New status return &order, nil, nil } @@ -180,13 +211,24 @@ func (m *SimplePriceMatching) executeTrade(trade types.Trade) { var err error // execute trade, update account balances if trade.IsBuyer { - err = m.Account.UseLockedBalance(m.Market.QuoteCurrency, fixedpoint.NewFromFloat(trade.Price*trade.Quantity)) + err = m.Account.UseLockedBalance(m.Market.QuoteCurrency, trade.QuoteQuantity) - _ = m.Account.AddBalance(m.Market.BaseCurrency, fixedpoint.NewFromFloat(trade.Quantity)) + // here the fee currency is the base currency + q := trade.Quantity + if trade.FeeCurrency == m.Market.BaseCurrency { + q = q.Sub(trade.Fee) + } + + m.Account.AddBalance(m.Market.BaseCurrency, q) } else { - err = m.Account.UseLockedBalance(m.Market.BaseCurrency, fixedpoint.NewFromFloat(trade.Quantity)) + err = m.Account.UseLockedBalance(m.Market.BaseCurrency, trade.Quantity) - _ = m.Account.AddBalance(m.Market.QuoteCurrency, fixedpoint.NewFromFloat(trade.Quantity*trade.Price)) + // here the fee currency is the quote currency + qq := trade.QuoteQuantity + if trade.FeeCurrency == m.Market.QuoteCurrency { + qq = qq.Sub(trade.Fee) + } + m.Account.AddBalance(m.Market.QuoteCurrency, qq) } if err != nil { @@ -195,54 +237,73 @@ func (m *SimplePriceMatching) executeTrade(trade types.Trade) { m.EmitTradeUpdate(trade) m.EmitBalanceUpdate(m.Account.Balances()) - return } -func (m *SimplePriceMatching) newTradeFromOrder(order types.Order, isMaker bool) types.Trade { +func (m *SimplePriceMatching) newTradeFromOrder(order *types.Order, isMaker bool) types.Trade { // BINANCE uses 0.1% for both maker and taker // MAX uses 0.050% for maker and 0.15% for taker - var commission = DefaultFeeRate - if isMaker && m.Account.MakerCommission > 0 { - commission = 0.0001 * float64(m.Account.MakerCommission) // binance uses 10~15 - } else if m.Account.TakerCommission > 0 { - commission = 0.0001 * float64(m.Account.TakerCommission) // binance uses 10~15 + var feeRate fixedpoint.Value + if isMaker { + feeRate = m.Account.MakerFeeRate + 
} else { + feeRate = m.Account.TakerFeeRate + } + + price := order.Price + switch order.Type { + case types.OrderTypeMarket, types.OrderTypeStopMarket: + if m.LastPrice.IsZero() { + panic("unexpected: last price can not be zero") + } + + price = m.LastPrice } - var fee float64 + var quoteQuantity = order.Quantity.Mul(price) + var fee fixedpoint.Value var feeCurrency string - switch order.Side { + if useFeeToken { + feeCurrency = FeeToken + fee = quoteQuantity.Mul(feeRate) + } else { + switch order.Side { - case types.SideTypeBuy: - fee = order.Quantity * commission - feeCurrency = m.Market.BaseCurrency + case types.SideTypeBuy: + fee = order.Quantity.Mul(feeRate) + feeCurrency = m.Market.BaseCurrency - case types.SideTypeSell: - fee = order.Quantity * order.Price * commission - feeCurrency = m.Market.QuoteCurrency + case types.SideTypeSell: + fee = quoteQuantity.Mul(feeRate) + feeCurrency = m.Market.QuoteCurrency + } } + // update order time + order.UpdateTime = types.Time(m.CurrentTime) + var id = incTradeID() return types.Trade{ - ID: int64(id), + ID: id, OrderID: order.OrderID, Exchange: "backtest", - Price: order.Price, + Price: price, Quantity: order.Quantity, - QuoteQuantity: order.Quantity * order.Price, + QuoteQuantity: quoteQuantity, Symbol: order.Symbol, Side: order.Side, IsBuyer: order.Side == types.SideTypeBuy, IsMaker: isMaker, - Time: m.CurrentTime, + Time: types.Time(m.CurrentTime), Fee: fee, FeeCurrency: feeCurrency, } } func (m *SimplePriceMatching) BuyToPrice(price fixedpoint.Value) (closedOrders []types.Order, trades []types.Trade) { - var priceF = price.Float64() + klineMatchingLogger.Debugf("kline buy to price %s", price.String()) + var askOrders []types.Order for _, o := range m.askOrders { @@ -250,7 +311,7 @@ func (m *SimplePriceMatching) BuyToPrice(price fixedpoint.Value) (closedOrders [ case types.OrderTypeStopMarket: // should we trigger the order - if priceF <= o.StopPrice { + if price.Compare(o.StopPrice) <= 0 { // not triggering it, put it back askOrders = append(askOrders, o) break @@ -258,20 +319,13 @@ func (m *SimplePriceMatching) BuyToPrice(price fixedpoint.Value) (closedOrders [ o.Type = types.OrderTypeMarket o.ExecutedQuantity = o.Quantity - o.Price = priceF + o.Price = price o.Status = types.OrderStatusFilled closedOrders = append(closedOrders, o) - trade := m.newTradeFromOrder(o, false) - m.executeTrade(trade) - - trades = append(trades, trade) - - m.EmitOrderUpdate(o) - case types.OrderTypeStopLimit: // should we trigger the order? - if priceF <= o.StopPrice { + if price.Compare(o.StopPrice) <= 0 { askOrders = append(askOrders, o) break } @@ -279,34 +333,26 @@ func (m *SimplePriceMatching) BuyToPrice(price fixedpoint.Value) (closedOrders [ o.Type = types.OrderTypeLimit // is it a taker order? 
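			// If the current price has already crossed the limit price, the ask order is
			// filled as a taker; its fill price is clamped to the kline low, so a simulated
			// fill never happens below the candle's actual range.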
- if priceF >= o.Price { + if price.Compare(o.Price) >= 0 { + if o.Price.Compare(m.LastKLine.Low) < 0 { + o.Price = m.LastKLine.Low + } o.ExecutedQuantity = o.Quantity o.Status = types.OrderStatusFilled closedOrders = append(closedOrders, o) - - trade := m.newTradeFromOrder(o, false) - m.executeTrade(trade) - - trades = append(trades, trade) - - m.EmitOrderUpdate(o) } else { // maker order askOrders = append(askOrders, o) } - case types.OrderTypeLimit: - if priceF >= o.Price { + case types.OrderTypeLimit, types.OrderTypeLimitMaker: + if price.Compare(o.Price) >= 0 { + if o.Price.Compare(m.LastKLine.Low) < 0 { + o.Price = m.LastKLine.Low + } o.ExecutedQuantity = o.Quantity o.Status = types.OrderStatusFilled closedOrders = append(closedOrders, o) - - trade := m.newTradeFromOrder(o, true) - m.executeTrade(trade) - - trades = append(trades, trade) - - m.EmitOrderUpdate(o) } else { askOrders = append(askOrders, o) } @@ -320,49 +366,52 @@ func (m *SimplePriceMatching) BuyToPrice(price fixedpoint.Value) (closedOrders [ m.askOrders = askOrders m.LastPrice = price + for i := range closedOrders { + o := closedOrders[i] + trade := m.newTradeFromOrder(&o, true) + m.executeTrade(trade) + closedOrders[i] = o + + trades = append(trades, trade) + + m.EmitOrderUpdate(o) + } + m.closedOrders = append(m.closedOrders, closedOrders...) + return closedOrders, trades } func (m *SimplePriceMatching) SellToPrice(price fixedpoint.Value) (closedOrders []types.Order, trades []types.Trade) { - var sellPrice = price.Float64() + klineMatchingLogger.Debugf("kline sell to price %s", price.String()) + + var sellPrice = price var bidOrders []types.Order for _, o := range m.bidOrders { switch o.Type { case types.OrderTypeStopMarket: // should we trigger the order - if sellPrice <= o.StopPrice { + if sellPrice.Compare(o.StopPrice) <= 0 { o.ExecutedQuantity = o.Quantity o.Price = sellPrice o.Status = types.OrderStatusFilled closedOrders = append(closedOrders, o) - - trade := m.newTradeFromOrder(o, false) - m.executeTrade(trade) - - trades = append(trades, trade) - - m.EmitOrderUpdate(o) } else { bidOrders = append(bidOrders, o) } case types.OrderTypeStopLimit: // should we trigger the order - if sellPrice <= o.StopPrice { + if sellPrice.Compare(o.StopPrice) <= 0 { o.Type = types.OrderTypeLimit - if sellPrice <= o.Price { + if sellPrice.Compare(o.Price) <= 0 { + if o.Price.Compare(m.LastKLine.High) > 0 { + o.Price = m.LastKLine.High + } o.ExecutedQuantity = o.Quantity o.Status = types.OrderStatusFilled closedOrders = append(closedOrders, o) - - trade := m.newTradeFromOrder(o, false) - m.executeTrade(trade) - - trades = append(trades, trade) - m.EmitOrderUpdate(o) - } else { bidOrders = append(bidOrders, o) } @@ -370,18 +419,11 @@ func (m *SimplePriceMatching) SellToPrice(price fixedpoint.Value) (closedOrders bidOrders = append(bidOrders, o) } - case types.OrderTypeLimit: - if sellPrice <= o.Price { + case types.OrderTypeLimit, types.OrderTypeLimitMaker: + if sellPrice.Compare(o.Price) <= 0 { o.ExecutedQuantity = o.Quantity o.Status = types.OrderStatusFilled closedOrders = append(closedOrders, o) - - trade := m.newTradeFromOrder(o, true) - m.executeTrade(trade) - - trades = append(trades, trade) - - m.EmitOrderUpdate(o) } else { bidOrders = append(bidOrders, o) } @@ -394,35 +436,62 @@ func (m *SimplePriceMatching) SellToPrice(price fixedpoint.Value) (closedOrders m.bidOrders = bidOrders m.LastPrice = price + for i := range closedOrders { + o := closedOrders[i] + trade := m.newTradeFromOrder(&o, true) + m.executeTrade(trade) + 
closedOrders[i] = o + + trades = append(trades, trade) + + m.EmitOrderUpdate(o) + } + m.closedOrders = append(m.closedOrders, closedOrders...) + return closedOrders, trades } func (m *SimplePriceMatching) processKLine(kline types.KLine) { - m.CurrentTime = kline.EndTime + m.CurrentTime = kline.EndTime.Time() + m.LastKLine = kline + if m.LastPrice.IsZero() { + m.LastPrice = kline.Open + } else { + if m.LastPrice.Compare(kline.Open) > 0 { + m.SellToPrice(kline.Open) + } else { + m.BuyToPrice(kline.Open) + } + } switch kline.Direction() { case types.DirectionDown: - if kline.High > kline.Open { - m.BuyToPrice(fixedpoint.NewFromFloat(kline.High)) + if kline.High.Compare(kline.Open) >= 0 { + m.BuyToPrice(kline.High) } - if kline.Low > kline.Close { - m.SellToPrice(fixedpoint.NewFromFloat(kline.Low)) - m.BuyToPrice(fixedpoint.NewFromFloat(kline.Close)) + // if low is lower than close, sell to low first, and then buy up to close + if kline.Low.Compare(kline.Close) < 0 { + m.SellToPrice(kline.Low) + m.BuyToPrice(kline.Close) } else { - m.SellToPrice(fixedpoint.NewFromFloat(kline.Close)) + m.SellToPrice(kline.Close) } case types.DirectionUp: - if kline.Low < kline.Open { - m.SellToPrice(fixedpoint.NewFromFloat(kline.Low)) + if kline.Low.Compare(kline.Open) <= 0 { + m.SellToPrice(kline.Low) } - if kline.High > kline.Close { - m.BuyToPrice(fixedpoint.NewFromFloat(kline.High)) - m.SellToPrice(fixedpoint.NewFromFloat(kline.Close)) + if kline.High.Compare(kline.Close) > 0 { + m.BuyToPrice(kline.High) + m.SellToPrice(kline.Close) } else { - m.BuyToPrice(fixedpoint.NewFromFloat(kline.Close)) + m.BuyToPrice(kline.Close) + } + default: // no trade up or down + if m.LastPrice.IsZero() { + m.BuyToPrice(kline.Close) } } @@ -432,11 +501,11 @@ func (m *SimplePriceMatching) newOrder(o types.SubmitOrder, orderID uint64) type return types.Order{ OrderID: orderID, SubmitOrder: o, - Exchange: "backtest", + Exchange: types.ExchangeBacktest, Status: types.OrderStatusNew, - ExecutedQuantity: 0, + ExecutedQuantity: fixedpoint.Zero, IsWorking: true, - CreationTime: m.CurrentTime, - UpdateTime: m.CurrentTime, + CreationTime: types.Time(m.CurrentTime), + UpdateTime: types.Time(m.CurrentTime), } } diff --git a/pkg/backtest/matching_test.go b/pkg/backtest/matching_test.go index 30022d70a1..94a3a45a26 100644 --- a/pkg/backtest/matching_test.go +++ b/pkg/backtest/matching_test.go @@ -15,16 +15,144 @@ func newLimitOrder(symbol string, side types.SideType, price, quantity float64) Symbol: symbol, Side: side, Type: types.OrderTypeLimit, - Quantity: quantity, - Price: price, - TimeInForce: "GTC", + Quantity: fixedpoint.NewFromFloat(quantity), + Price: fixedpoint.NewFromFloat(price), + TimeInForce: types.TimeInForceGTC, } } -func TestSimplePriceMatching_LimitOrder(t *testing.T) { +func TestSimplePriceMatching_orderUpdate(t *testing.T) { account := &types.Account{ - MakerCommission: 15, - TakerCommission: 15, + MakerFeeRate: fixedpoint.NewFromFloat(0.075 * 0.01), + TakerFeeRate: fixedpoint.NewFromFloat(0.075 * 0.01), + } + account.UpdateBalances(types.BalanceMap{ + "USDT": {Currency: "USDT", Available: fixedpoint.NewFromFloat(10000.0)}, + }) + market := types.Market{ + Symbol: "BTCUSDT", + PricePrecision: 8, + VolumePrecision: 8, + QuoteCurrency: "USDT", + BaseCurrency: "BTC", + MinNotional: fixedpoint.MustNewFromString("0.001"), + MinAmount: fixedpoint.MustNewFromString("10.0"), + MinQuantity: fixedpoint.MustNewFromString("0.001"), + } + + t1 := time.Date(2021, 7, 1, 0, 0, 0, 0, time.UTC) + engine := &SimplePriceMatching{ + Account: 
account, + Market: market, + CurrentTime: t1, + } + + orderUpdateCnt := 0 + orderUpdateNewStatusCnt := 0 + orderUpdateFilledStatusCnt := 0 + var lastOrder types.Order + engine.OnOrderUpdate(func(order types.Order) { + lastOrder = order + + orderUpdateCnt++ + switch order.Status { + case types.OrderStatusNew: + orderUpdateNewStatusCnt++ + + case types.OrderStatusFilled: + orderUpdateFilledStatusCnt++ + + } + }) + + _, _, err := engine.PlaceOrder(newLimitOrder("BTCUSDT", types.SideTypeBuy, 24000.0, 0.1)) + assert.NoError(t, err) + assert.Equal(t, 1, orderUpdateCnt) // should got new status + assert.Equal(t, 1, orderUpdateNewStatusCnt) // should got new status + assert.Equal(t, 0, orderUpdateFilledStatusCnt) // should got new status + assert.Equal(t, types.OrderStatusNew, lastOrder.Status) + assert.Equal(t, fixedpoint.NewFromFloat(0.0), lastOrder.ExecutedQuantity) + + t2 := t1.Add(time.Minute) + + // should match 25000, 24000 + k := newKLine("BTCUSDT", types.Interval1m, t2, 26000, 27000, 23000, 25000) + engine.processKLine(k) + + assert.Equal(t, 2, orderUpdateCnt) // should got new and filled + assert.Equal(t, 1, orderUpdateNewStatusCnt) // should got new status + assert.Equal(t, 1, orderUpdateFilledStatusCnt) // should got new status + assert.Equal(t, types.OrderStatusFilled, lastOrder.Status) + assert.Equal(t, "0.1", lastOrder.ExecutedQuantity.String()) + assert.Equal(t, lastOrder.Quantity.String(), lastOrder.ExecutedQuantity.String()) +} + +func TestSimplePriceMatching_processKLine(t *testing.T) { + account := &types.Account{ + MakerFeeRate: fixedpoint.NewFromFloat(0.075 * 0.01), + TakerFeeRate: fixedpoint.NewFromFloat(0.075 * 0.01), + } + account.UpdateBalances(types.BalanceMap{ + "USDT": {Currency: "USDT", Available: fixedpoint.NewFromFloat(10000.0)}, + }) + market := types.Market{ + Symbol: "BTCUSDT", + PricePrecision: 8, + VolumePrecision: 8, + QuoteCurrency: "USDT", + BaseCurrency: "BTC", + MinNotional: fixedpoint.MustNewFromString("0.001"), + MinAmount: fixedpoint.MustNewFromString("10.0"), + MinQuantity: fixedpoint.MustNewFromString("0.001"), + } + + t1 := time.Date(2021, 7, 1, 0, 0, 0, 0, time.UTC) + engine := &SimplePriceMatching{ + Account: account, + Market: market, + CurrentTime: t1, + } + + for i := 0; i <= 5; i++ { + var p = 20000.0 + float64(i)*1000.0 + _, _, err := engine.PlaceOrder(newLimitOrder("BTCUSDT", types.SideTypeBuy, p, 0.001)) + assert.NoError(t, err) + } + + t2 := t1.Add(time.Minute) + + // should match 25000, 24000 + k := newKLine("BTCUSDT", types.Interval1m, t2, 26000, 27000, 23000, 25000) + assert.Equal(t, t2.Add(time.Minute-time.Millisecond), k.EndTime.Time()) + + engine.processKLine(k) + assert.Equal(t, 3, len(engine.bidOrders)) + assert.Len(t, engine.bidOrders, 3) + assert.Equal(t, 3, len(engine.closedOrders)) + + for _, o := range engine.closedOrders { + assert.Equal(t, k.EndTime.Time(), o.UpdateTime.Time()) + } +} + +func newKLine(symbol string, interval types.Interval, startTime time.Time, o, h, l, c float64) types.KLine { + return types.KLine{ + Symbol: symbol, + StartTime: types.Time(startTime), + EndTime: types.Time(startTime.Add(interval.Duration() - time.Millisecond)), + Interval: interval, + Open: fixedpoint.NewFromFloat(o), + High: fixedpoint.NewFromFloat(h), + Low: fixedpoint.NewFromFloat(l), + Close: fixedpoint.NewFromFloat(c), + Closed: true, + } +} + +func TestSimplePriceMatching_PlaceLimitOrder(t *testing.T) { + account := &types.Account{ + MakerFeeRate: fixedpoint.NewFromFloat(0.075 * 0.01), + TakerFeeRate: fixedpoint.NewFromFloat(0.075 * 
0.01), } account.UpdateBalances(types.BalanceMap{ @@ -38,16 +166,14 @@ func TestSimplePriceMatching_LimitOrder(t *testing.T) { VolumePrecision: 8, QuoteCurrency: "USDT", BaseCurrency: "BTC", - MinNotional: 0.001, - MinAmount: 10.0, - MinLot: 0.001, - MinQuantity: 0.001, + MinNotional: fixedpoint.MustNewFromString("0.001"), + MinAmount: fixedpoint.MustNewFromString("10.0"), + MinQuantity: fixedpoint.MustNewFromString("0.001"), } engine := &SimplePriceMatching{ - CurrentTime: time.Now(), - Account: account, - Market: market, + Account: account, + Market: market, } for i := 0; i < 5; i++ { diff --git a/pkg/backtest/priceorder.go b/pkg/backtest/priceorder.go index 9a0e52ae4b..c74e636d42 100644 --- a/pkg/backtest/priceorder.go +++ b/pkg/backtest/priceorder.go @@ -15,7 +15,7 @@ type PriceOrder struct { type PriceOrderSlice []PriceOrder func (slice PriceOrderSlice) Len() int { return len(slice) } -func (slice PriceOrderSlice) Less(i, j int) bool { return slice[i].Price < slice[j].Price } +func (slice PriceOrderSlice) Less(i, j int) bool { return slice[i].Price.Compare(slice[j].Price) < 0 } func (slice PriceOrderSlice) Swap(i, j int) { slice[i], slice[j] = slice[j], slice[i] } func (slice PriceOrderSlice) InsertAt(idx int, po PriceOrder) PriceOrderSlice { @@ -47,9 +47,9 @@ func (slice PriceOrderSlice) First() (PriceOrder, bool) { func (slice PriceOrderSlice) Find(price fixedpoint.Value, descending bool) (pv PriceOrder, idx int) { idx = sort.Search(len(slice), func(i int) bool { if descending { - return slice[i].Price <= price + return slice[i].Price.Compare(price) <= 0 } - return slice[i].Price >= price + return slice[i].Price.Compare(price) >= 0 }) if idx >= len(slice) || slice[idx].Price != price { diff --git a/pkg/backtest/recorder.go b/pkg/backtest/recorder.go new file mode 100644 index 0000000000..d5f516ade2 --- /dev/null +++ b/pkg/backtest/recorder.go @@ -0,0 +1,131 @@ +package backtest + +import ( + "fmt" + "path/filepath" + "reflect" + "strings" + + "go.uber.org/multierr" + + "github.com/c9s/bbgo/pkg/data/tsv" + "github.com/c9s/bbgo/pkg/types" +) + +type Instance interface { + ID() string + InstanceID() string +} + +type InstancePropertyIndex struct { + ID string + InstanceID string + Property string +} + +type StateRecorder struct { + outputDirectory string + strategies []Instance + writers map[types.CsvFormatter]*tsv.Writer + manifests Manifests +} + +func NewStateRecorder(outputDir string) *StateRecorder { + return &StateRecorder{ + outputDirectory: outputDir, + writers: make(map[types.CsvFormatter]*tsv.Writer), + manifests: make(Manifests), + } +} + +func (r *StateRecorder) Snapshot() (int, error) { + var c int + for obj, writer := range r.writers { + records := obj.CsvRecords() + for _, record := range records { + if err := writer.Write(record); err != nil { + return c, err + } + c++ + } + + writer.Flush() + } + return c, nil +} + +func (r *StateRecorder) Scan(instance Instance) error { + r.strategies = append(r.strategies, instance) + + rt := reflect.TypeOf(instance) + rv := reflect.ValueOf(instance) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + rv = rv.Elem() + } + + if rt.Kind() != reflect.Struct { + return fmt.Errorf("given object is not a struct: %+v", rt) + } + + for i := 0; i < rt.NumField(); i++ { + structField := rt.Field(i) + if !structField.IsExported() { + continue + } + + obj := rv.Field(i).Interface() + switch o := obj.(type) { + + case types.CsvFormatter: // interface type + typeName := strings.ToLower(structField.Type.Elem().Name()) + if typeName == "" { + return 
fmt.Errorf("%v is a non-defined type", structField.Type) + } + + if err := r.newCsvWriter(o, instance, typeName); err != nil { + return err + } + } + } + + return nil +} + +func (r *StateRecorder) formatCsvFilename(instance Instance, objType string) string { + return filepath.Join(r.outputDirectory, fmt.Sprintf("%s-%s.tsv", instance.InstanceID(), objType)) +} + +func (r *StateRecorder) Manifests() Manifests { + return r.manifests +} + +func (r *StateRecorder) newCsvWriter(o types.CsvFormatter, instance Instance, typeName string) error { + fn := r.formatCsvFilename(instance, typeName) + w, err := tsv.NewWriterFile(fn) + if err != nil { + return err + } + + r.manifests[InstancePropertyIndex{ + ID: instance.ID(), + InstanceID: instance.InstanceID(), + Property: typeName, + }] = fn + + r.writers[o] = w + return w.Write(o.CsvHeader()) +} + +func (r *StateRecorder) Close() error { + var err error + + for _, w := range r.writers { + err2 := w.Close() + if err2 != nil { + err = multierr.Append(err, err2) + } + } + + return err +} diff --git a/pkg/backtest/recorder_test.go b/pkg/backtest/recorder_test.go new file mode 100644 index 0000000000..3b6348d8ef --- /dev/null +++ b/pkg/backtest/recorder_test.go @@ -0,0 +1,61 @@ +package backtest + +import ( + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type testStrategy struct { + Symbol string + + Position *types.Position +} + +func (s *testStrategy) ID() string { return "my-test" } +func (s *testStrategy) InstanceID() string { return "my-test:" + s.Symbol } + +func TestStateRecorder(t *testing.T) { + tmpDir, _ := os.MkdirTemp(os.TempDir(), "bbgo") + t.Logf("tmpDir: %s", tmpDir) + + st := &testStrategy{ + Symbol: "BTCUSDT", + Position: types.NewPosition("BTCUSDT", "BTC", "USDT"), + } + + recorder := NewStateRecorder(tmpDir) + err := recorder.Scan(st) + assert.NoError(t, err) + assert.Len(t, recorder.writers, 1) + + st.Position.AddTrade(types.Trade{ + OrderID: 1, + Exchange: types.ExchangeBinance, + Price: fixedpoint.NewFromFloat(18000.0), + Quantity: fixedpoint.NewFromFloat(1.0), + QuoteQuantity: fixedpoint.NewFromFloat(18000.0), + Symbol: "BTCUSDT", + Side: types.SideTypeBuy, + IsBuyer: true, + IsMaker: false, + Time: types.Time(time.Now()), + Fee: fixedpoint.NewFromFloat(0.00001), + FeeCurrency: "BNB", + IsMargin: false, + IsFutures: false, + IsIsolated: false, + }) + + n, err := recorder.Snapshot() + assert.NoError(t, err) + assert.Equal(t, 1, n) + + err = recorder.Close() + assert.NoError(t, err) +} diff --git a/pkg/backtest/report.go b/pkg/backtest/report.go new file mode 100644 index 0000000000..782e6e4d6b --- /dev/null +++ b/pkg/backtest/report.go @@ -0,0 +1,194 @@ +package backtest + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "strings" + "time" + + "github.com/fatih/color" + "github.com/gofrs/flock" + + "github.com/c9s/bbgo/pkg/accounting/pnl" + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" +) + +type Run struct { + ID string `json:"id"` + Config *bbgo.Config `json:"config"` + Time time.Time `json:"time"` +} + +type ReportIndex struct { + Runs []Run `json:"runs,omitempty"` +} + +// SummaryReport is the summary of the back-test session +type SummaryReport struct { + StartTime time.Time `json:"startTime"` + EndTime time.Time `json:"endTime"` + Sessions []string `json:"sessions"` + Symbols []string `json:"symbols"` + Intervals 
[]types.Interval `json:"intervals"` + InitialTotalBalances types.BalanceMap `json:"initialTotalBalances"` + FinalTotalBalances types.BalanceMap `json:"finalTotalBalances"` + + // TotalProfit is the profit aggregated from the symbol reports + TotalProfit fixedpoint.Value `json:"totalProfit,omitempty"` + TotalUnrealizedProfit fixedpoint.Value `json:"totalUnrealizedProfit,omitempty"` + + SymbolReports []SessionSymbolReport `json:"symbolReports,omitempty"` + + Manifests Manifests `json:"manifests,omitempty"` +} + +func ReadSummaryReport(filename string) (*SummaryReport, error) { + o, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + + var report SummaryReport + err = json.Unmarshal(o, &report) + return &report, err +} + +// SessionSymbolReport is the report per exchange session +// trades are merged, collected and re-calculated +type SessionSymbolReport struct { + Exchange types.ExchangeName `json:"exchange"` + Symbol string `json:"symbol,omitempty"` + Intervals []types.Interval `json:"intervals,omitempty"` + Subscriptions []types.Subscription `json:"subscriptions"` + Market types.Market `json:"market"` + LastPrice fixedpoint.Value `json:"lastPrice,omitempty"` + StartPrice fixedpoint.Value `json:"startPrice,omitempty"` + PnL *pnl.AverageCostPnlReport `json:"pnl,omitempty"` + InitialBalances types.BalanceMap `json:"initialBalances,omitempty"` + FinalBalances types.BalanceMap `json:"finalBalances,omitempty"` + Manifests Manifests `json:"manifests,omitempty"` +} + +func (r *SessionSymbolReport) Print(wantBaseAssetBaseline bool) { + color.Green("%s %s PROFIT AND LOSS REPORT", r.Exchange, r.Symbol) + color.Green("===============================================") + r.PnL.Print() + + initQuoteAsset := inQuoteAsset(r.InitialBalances, r.Market, r.StartPrice) + finalQuoteAsset := inQuoteAsset(r.FinalBalances, r.Market, r.LastPrice) + color.Green("INITIAL ASSET IN %s ~= %s %s (1 %s = %v)", r.Market.QuoteCurrency, r.Market.FormatQuantity(initQuoteAsset), r.Market.QuoteCurrency, r.Market.BaseCurrency, r.StartPrice) + color.Green("FINAL ASSET IN %s ~= %s %s (1 %s = %v)", r.Market.QuoteCurrency, r.Market.FormatQuantity(finalQuoteAsset), r.Market.QuoteCurrency, r.Market.BaseCurrency, r.LastPrice) + + if r.PnL.Profit.Sign() > 0 { + color.Green("REALIZED PROFIT: +%v %s", r.PnL.Profit, r.Market.QuoteCurrency) + } else { + color.Red("REALIZED PROFIT: %v %s", r.PnL.Profit, r.Market.QuoteCurrency) + } + + if r.PnL.UnrealizedProfit.Sign() > 0 { + color.Green("UNREALIZED PROFIT: +%v %s", r.PnL.UnrealizedProfit, r.Market.QuoteCurrency) + } else { + color.Red("UNREALIZED PROFIT: %v %s", r.PnL.UnrealizedProfit, r.Market.QuoteCurrency) + } + + if finalQuoteAsset.Compare(initQuoteAsset) > 0 { + color.Green("ASSET INCREASED: +%v %s (+%s)", finalQuoteAsset.Sub(initQuoteAsset), r.Market.QuoteCurrency, finalQuoteAsset.Sub(initQuoteAsset).Div(initQuoteAsset).FormatPercentage(2)) + } else { + color.Red("ASSET DECREASED: %v %s (%s)", finalQuoteAsset.Sub(initQuoteAsset), r.Market.QuoteCurrency, finalQuoteAsset.Sub(initQuoteAsset).Div(initQuoteAsset).FormatPercentage(2)) + } + + if wantBaseAssetBaseline { + if r.LastPrice.Compare(r.StartPrice) > 0 { + color.Green("%s BASE ASSET PERFORMANCE: +%s (= (%s - %s) / %s)", + r.Market.BaseCurrency, + r.LastPrice.Sub(r.StartPrice).Div(r.StartPrice).FormatPercentage(2), + r.LastPrice.FormatString(2), + r.StartPrice.FormatString(2), + r.StartPrice.FormatString(2)) + } else { + color.Red("%s BASE ASSET PERFORMANCE: %s (= (%s - %s) / %s)", + r.Market.BaseCurrency, + 
r.LastPrice.Sub(r.StartPrice).Div(r.StartPrice).FormatPercentage(2), + r.LastPrice.FormatString(2), + r.StartPrice.FormatString(2), + r.StartPrice.FormatString(2)) + } + } +} + +const SessionTimeFormat = "2006-01-02T15_04" + +// FormatSessionName returns the back-test session name +func FormatSessionName(sessions []string, symbols []string, startTime, endTime time.Time) string { + return fmt.Sprintf("%s_%s_%s-%s", + strings.Join(sessions, "-"), + strings.Join(symbols, "-"), + startTime.Format(SessionTimeFormat), + endTime.Format(SessionTimeFormat), + ) +} + +func WriteReportIndex(outputDirectory string, reportIndex *ReportIndex) error { + indexFile := filepath.Join(outputDirectory, "index.json") + if err := util.WriteJsonFile(indexFile, reportIndex); err != nil { + return err + } + return nil +} + +func LoadReportIndex(outputDirectory string) (*ReportIndex, error) { + var reportIndex ReportIndex + indexFile := filepath.Join(outputDirectory, "index.json") + if _, err := os.Stat(indexFile); err == nil { + o, err := ioutil.ReadFile(indexFile) + if err != nil { + return nil, err + } + + if err := json.Unmarshal(o, &reportIndex); err != nil { + return nil, err + } + } + + return &reportIndex, nil +} + +func AddReportIndexRun(outputDirectory string, run Run) error { + // append report index + lockFile := filepath.Join(outputDirectory, ".report.lock") + fileLock := flock.New(lockFile) + + err := fileLock.Lock() + if err != nil { + return err + } + + defer func() { + if err := fileLock.Unlock(); err != nil { + log.WithError(err).Errorf("report index file lock error: %s", lockFile) + } + if err := os.Remove(lockFile); err != nil { + log.WithError(err).Errorf("can not remove lock file: %s", lockFile) + } + }() + reportIndex, err := LoadReportIndex(outputDirectory) + if err != nil { + return err + } + + reportIndex.Runs = append(reportIndex.Runs, run) + return WriteReportIndex(outputDirectory, reportIndex) +} + +// inQuoteAsset converts all balances in quote asset +func inQuoteAsset(balances types.BalanceMap, market types.Market, price fixedpoint.Value) fixedpoint.Value { + quote := balances[market.QuoteCurrency] + base := balances[market.BaseCurrency] + return base.Total().Mul(price).Add(quote.Total()) +} diff --git a/pkg/backtest/stream.go b/pkg/backtest/stream.go deleted file mode 100644 index d3a1d491e0..0000000000 --- a/pkg/backtest/stream.go +++ /dev/null @@ -1,87 +0,0 @@ -package backtest - -import ( - "context" - "fmt" - - log "github.com/sirupsen/logrus" - - "github.com/c9s/bbgo/pkg/types" -) - -type Stream struct { - types.StandardStream - - exchange *Exchange -} - -func (s *Stream) Connect(ctx context.Context) error { - log.Infof("collecting backtest configurations...") - - loadedSymbols := map[string]struct{}{} - loadedIntervals := map[types.Interval]struct{}{ - // 1m interval is required for the backtest matching engine - types.Interval1m: {}, - types.Interval1d: {}, - } - - for _, sub := range s.Subscriptions { - loadedSymbols[sub.Symbol] = struct{}{} - - switch sub.Channel { - case types.KLineChannel: - loadedIntervals[types.Interval(sub.Options.Interval)] = struct{}{} - - default: - return fmt.Errorf("stream channel %s is not supported in backtest", sub.Channel) - } - } - - var symbols []string - for symbol := range loadedSymbols { - symbols = append(symbols, symbol) - } - - var intervals []types.Interval - for interval := range loadedIntervals { - intervals = append(intervals, interval) - } - - log.Infof("used symbols: %v and intervals: %v", symbols, intervals) - - go func() { - 
s.EmitConnect() - - klineC, errC := s.exchange.srv.QueryKLinesCh(s.exchange.startTime, s.exchange.endTime, s.exchange, symbols, intervals) - for k := range klineC { - if k.Interval == types.Interval1m { - matching, ok := s.exchange.matchingBooks[k.Symbol] - if !ok { - log.Errorf("matching book of %s is not initialized", k.Symbol) - } - matching.processKLine(k) - } - - s.EmitKLineClosed(k) - } - - if err := <-errC; err != nil { - log.WithError(err).Error("backtest data feed error") - } - - if err := s.Close(); err != nil { - log.WithError(err).Error("stream close error") - } - }() - - return nil -} - -func (s *Stream) SetPublicOnly() { - return -} - -func (s *Stream) Close() error { - close(s.exchange.doneC) - return nil -} diff --git a/pkg/bbgo/active_book.go b/pkg/bbgo/active_book.go deleted file mode 100644 index b0fd941042..0000000000 --- a/pkg/bbgo/active_book.go +++ /dev/null @@ -1,135 +0,0 @@ -package bbgo - -import ( - log "github.com/sirupsen/logrus" - - "github.com/c9s/bbgo/pkg/types" -) - -// LocalActiveOrderBook manages the local active order books. -//go:generate callbackgen -type LocalActiveOrderBook -type LocalActiveOrderBook struct { - Bids *types.SyncOrderMap - Asks *types.SyncOrderMap - - filledCallbacks []func(o types.Order) -} - -func NewLocalActiveOrderBook() *LocalActiveOrderBook { - return &LocalActiveOrderBook{ - Bids: types.NewSyncOrderMap(), - Asks: types.NewSyncOrderMap(), - } -} - -func (b *LocalActiveOrderBook) BindStream(stream types.Stream) { - stream.OnOrderUpdate(b.orderUpdateHandler) -} - -func (b *LocalActiveOrderBook) orderUpdateHandler(order types.Order) { - log.Debugf("[LocalActiveOrderBook] received order update: %+v", order) - - switch order.Status { - case types.OrderStatusFilled: - // make sure we have the order and we remove it - if b.Remove(order) { - b.EmitFilled(order) - } - - case types.OrderStatusPartiallyFilled, types.OrderStatusNew: - b.Update(order) - - case types.OrderStatusCanceled, types.OrderStatusRejected: - log.Debugf("[LocalActiveOrderBook] order status %s, removing %d...", order.Status, order.OrderID) - b.Remove(order) - } -} - -func (b *LocalActiveOrderBook) Print() { - for _, o := range b.Bids.Orders() { - log.Infof("bid order: %d -> %s", o.OrderID, o.Status) - } - - for _, o := range b.Asks.Orders() { - log.Infof("ask order: %d -> %s", o.OrderID, o.Status) - } -} - -func (b *LocalActiveOrderBook) Update(orders ...types.Order) { - for _, order := range orders { - switch order.Side { - case types.SideTypeBuy: - b.Bids.Update(order) - - case types.SideTypeSell: - b.Asks.Update(order) - - } - } -} - -func (b *LocalActiveOrderBook) Add(orders ...types.Order) { - for _, order := range orders { - switch order.Side { - case types.SideTypeBuy: - b.Bids.Add(order) - - case types.SideTypeSell: - b.Asks.Add(order) - - } - } -} - -func (b *LocalActiveOrderBook) NumOfBids() int { - return b.Bids.Len() -} - -func (b *LocalActiveOrderBook) NumOfAsks() int { - return b.Asks.Len() -} - -func (b *LocalActiveOrderBook) Remove(order types.Order) bool { - switch order.Side { - case types.SideTypeBuy: - return b.Bids.Remove(order.OrderID) - - case types.SideTypeSell: - return b.Asks.Remove(order.OrderID) - - } - - return false -} - -// WriteOff writes off the filled order on the opposite side. -// This method does not write off order by order amount or order quantity. 
-func (b *LocalActiveOrderBook) WriteOff(order types.Order) bool { - if order.Status != types.OrderStatusFilled { - return false - } - - switch order.Side { - case types.SideTypeSell: - // find the filled bid to remove - if filledOrder, ok := b.Bids.AnyFilled(); ok { - b.Bids.Remove(filledOrder.OrderID) - b.Asks.Remove(order.OrderID) - return true - } - - case types.SideTypeBuy: - // find the filled ask order to remove - if filledOrder, ok := b.Asks.AnyFilled(); ok { - b.Asks.Remove(filledOrder.OrderID) - b.Bids.Remove(order.OrderID) - return true - } - } - - return false -} - -func (b *LocalActiveOrderBook) Orders() types.OrderSlice { - return append(b.Asks.Orders(), b.Bids.Orders()...) -} diff --git a/pkg/bbgo/activeorderbook.go b/pkg/bbgo/activeorderbook.go new file mode 100644 index 0000000000..e720e46fdf --- /dev/null +++ b/pkg/bbgo/activeorderbook.go @@ -0,0 +1,193 @@ +package bbgo + +import ( + "context" + "encoding/json" + "time" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/types" +) + +const CancelOrderWaitTime = 20 * time.Millisecond + +// ActiveOrderBook manages the local active order books. +//go:generate callbackgen -type ActiveOrderBook +type ActiveOrderBook struct { + Symbol string + orders *types.SyncOrderMap + filledCallbacks []func(o types.Order) +} + +func NewActiveOrderBook(symbol string) *ActiveOrderBook { + return &ActiveOrderBook{ + Symbol: symbol, + orders: types.NewSyncOrderMap(), + } +} + +func (b *ActiveOrderBook) MarshalJSON() ([]byte, error) { + orders := b.Backup() + return json.Marshal(orders) +} + +func (b *ActiveOrderBook) Backup() []types.SubmitOrder { + return b.orders.Backup() +} + +func (b *ActiveOrderBook) BindStream(stream types.Stream) { + stream.OnOrderUpdate(b.orderUpdateHandler) +} + +func (b *ActiveOrderBook) waitAllClear(ctx context.Context, waitTime, timeout time.Duration) (bool, error) { + numOfOrders := b.NumOfOrders() + clear := numOfOrders == 0 + if clear { + return clear, nil + } + + timeoutC := time.After(timeout) + for { + time.Sleep(waitTime) + numOfOrders = b.NumOfOrders() + clear = numOfOrders == 0 + select { + case <-timeoutC: + return clear, nil + + case <-ctx.Done(): + return clear, ctx.Err() + + default: + if clear { + return clear, nil + } + } + } +} + +// GracefulCancel cancels the active orders gracefully +func (b *ActiveOrderBook) GracefulCancel(ctx context.Context, ex types.Exchange) error { + // optimize order cancel for back-testing + if IsBackTesting { + orders := b.Orders() + return ex.CancelOrders(context.Background(), orders...) + } + + log.Debugf("[ActiveOrderBook] gracefully cancelling %s orders...", b.Symbol) + waitTime := CancelOrderWaitTime + + startTime := time.Now() + // ensure every order is cancelled + for { + orders := b.Orders() + + // Some orders in the variable are not created on the server side yet, + // If we cancel these orders directly, we will get an unsent order error + // We wait here for a while for server to create these orders. 
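		// The loop below keeps cancelling until waitAllClear reports no remaining
		// active orders: cancel everything tracked locally, wait up to 5 seconds,
		// then fall back to QueryOpenOrders and drop the orders that are already
		// gone on the exchange side.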
+ // time.Sleep(SentOrderWaitTime) + + // since ctx might be canceled, we should use background context here + if err := ex.CancelOrders(context.Background(), orders...); err != nil { + log.WithError(err).Errorf("[ActiveOrderBook] can not cancel %s orders", b.Symbol) + } + + log.Debugf("[ActiveOrderBook] waiting %s for %s orders to be cancelled...", waitTime, b.Symbol) + + clear, err := b.waitAllClear(ctx, waitTime, 5*time.Second) + if clear || err != nil { + break + } + + log.Warnf("[ActiveOrderBook] %d %s orders are not cancelled yet:", b.NumOfOrders(), b.Symbol) + b.Print() + + // verify the current open orders via the RESTful API + log.Warnf("[ActiveOrderBook] using REStful API to verify active orders...") + openOrders, err := ex.QueryOpenOrders(ctx, b.Symbol) + if err != nil { + log.WithError(err).Errorf("can not query %s open orders", b.Symbol) + continue + } + + openOrderStore := NewOrderStore(b.Symbol) + openOrderStore.Add(openOrders...) + for _, o := range orders { + // if it's not on the order book (open orders), we should remove it from our local side + if !openOrderStore.Exists(o.OrderID) { + b.Remove(o) + } + } + } + + log.Debugf("[ActiveOrderBook] all %s orders are cancelled successfully in %s", b.Symbol, time.Since(startTime)) + return nil +} + +func (b *ActiveOrderBook) orderUpdateHandler(order types.Order) { + hasSymbol := len(b.Symbol) > 0 + if hasSymbol && order.Symbol != b.Symbol { + return + } + + log.Debugf("[ActiveOrderBook] received order update: %+v", order) + + switch order.Status { + case types.OrderStatusFilled: + // make sure we have the order and we remove it + if b.Remove(order) { + b.EmitFilled(order) + } + + case types.OrderStatusPartiallyFilled, types.OrderStatusNew: + b.Update(order) + + case types.OrderStatusCanceled, types.OrderStatusRejected: + log.Debugf("[ActiveOrderBook] order status %s, removing order %s", order.Status, order) + b.Remove(order) + + default: + log.Warnf("unhandled order status: %s", order.Status) + } +} + +func (b *ActiveOrderBook) Print() { + for _, o := range b.orders.Orders() { + log.Infof("%s", o) + } +} + +func (b *ActiveOrderBook) Update(orders ...types.Order) { + hasSymbol := len(b.Symbol) > 0 + for _, order := range orders { + if hasSymbol && b.Symbol == order.Symbol { + b.orders.Update(order) + } + } +} + +func (b *ActiveOrderBook) Add(orders ...types.Order) { + hasSymbol := len(b.Symbol) > 0 + for _, order := range orders { + if hasSymbol && b.Symbol == order.Symbol { + b.orders.Add(order) + } + } +} + +func (b *ActiveOrderBook) Exists(order types.Order) bool { + return b.orders.Exists(order.OrderID) +} + +func (b *ActiveOrderBook) Remove(order types.Order) bool { + return b.orders.Remove(order.OrderID) +} + +func (b *ActiveOrderBook) NumOfOrders() int { + return b.orders.Len() +} + +func (b *ActiveOrderBook) Orders() types.OrderSlice { + return b.orders.Orders() +} diff --git a/pkg/bbgo/activeorderbook_callbacks.go b/pkg/bbgo/activeorderbook_callbacks.go new file mode 100644 index 0000000000..5110476043 --- /dev/null +++ b/pkg/bbgo/activeorderbook_callbacks.go @@ -0,0 +1,17 @@ +// Code generated by "callbackgen -type ActiveOrderBook"; DO NOT EDIT. 
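// (callbackgen appears to emit an OnXxx registration method and an EmitXxx
// dispatcher for each xxxCallbacks slice declared on the type; ActiveOrderBook
// relies on the OnFilled/EmitFilled pair fired from orderUpdateHandler above.)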
+ +package bbgo + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +func (b *ActiveOrderBook) OnFilled(cb func(o types.Order)) { + b.filledCallbacks = append(b.filledCallbacks, cb) +} + +func (b *ActiveOrderBook) EmitFilled(o types.Order) { + for _, cb := range b.filledCallbacks { + cb(o) + } +} diff --git a/pkg/bbgo/builder.go b/pkg/bbgo/builder.go index dcee9dc396..bf328c2c89 100644 --- a/pkg/bbgo/builder.go +++ b/pkg/bbgo/builder.go @@ -15,8 +15,10 @@ import ( "github.com/sirupsen/logrus" ) -var wrapperTemplate = template.Must(template.New("main").Parse(`package main -// DO NOT MODIFY THIS FILE. THIS FILE IS GENERATED FOR IMPORTING STRATEGIES +var wrapperTemplate = template.Must(template.New("main").Parse(`// Code generated by bbgo; DO NOT EDIT. + +package main + import ( "github.com/c9s/bbgo/pkg/bbgo" "github.com/c9s/bbgo/pkg/cmd" @@ -80,8 +82,6 @@ func Build(ctx context.Context, userConfig *Config, targetConfig BuildTargetConf return "", err } - defer os.RemoveAll(packageDir) - if err := compilePackage(packageDir, userConfig, imports); err != nil { return "", err } @@ -110,8 +110,9 @@ func Build(ctx context.Context, userConfig *Config, targetConfig BuildTargetConf output := filepath.Join(buildDir, binary) - logrus.Infof("building binary %s from %s...", output, buildTarget) - buildCmd := exec.CommandContext(ctx, "go", "build", "-tags", "wrapper", "-o", output, buildTarget) + args := []string{"build", "-tags", "wrapper", "-o", output, buildTarget} + logrus.Debugf("building binary %s from %s: go %v", output, buildTarget, args) + buildCmd := exec.CommandContext(ctx, "go", args...) buildCmd.Env = append(os.Environ(), buildEnvs...) buildCmd.Stdout = os.Stdout buildCmd.Stderr = os.Stderr @@ -119,7 +120,7 @@ func Build(ctx context.Context, userConfig *Config, targetConfig BuildTargetConf return output, err } - return output, nil + return output, os.RemoveAll(packageDir) } func BuildTarget(ctx context.Context, userConfig *Config, target BuildTargetConfig) (string, error) { @@ -128,6 +129,13 @@ func BuildTarget(ctx context.Context, userConfig *Config, target BuildTargetConf buildDir = "build" } + if _, err := os.Stat(buildDir); os.IsNotExist(err) { + err = os.Mkdir(buildDir, 0777) + if err != nil { + return "", err + } + } + buildDir = filepath.Join(userConfig.Build.BuildDir, target.Name) return Build(ctx, userConfig, target) } diff --git a/pkg/bbgo/config.go b/pkg/bbgo/config.go index c706e8af7b..dd3c8ad17a 100644 --- a/pkg/bbgo/config.go +++ b/pkg/bbgo/config.go @@ -1,34 +1,62 @@ package bbgo import ( + "bytes" "encoding/json" "fmt" "io/ioutil" "reflect" "runtime" - "time" + "strings" "github.com/pkg/errors" "gopkg.in/yaml.v3" "github.com/c9s/bbgo/pkg/datatype" "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/service" "github.com/c9s/bbgo/pkg/types" ) +// DefaultFeeRate set the fee rate for most cases +// BINANCE uses 0.1% for both maker and taker +// for BNB holders, it's 0.075% for both maker and taker +// MAX uses 0.050% for maker and 0.15% for taker +var DefaultFeeRate = fixedpoint.NewFromFloat(0.075 * 0.01) + type PnLReporterConfig struct { AverageCostBySymbols datatype.StringSlice `json:"averageCostBySymbols" yaml:"averageCostBySymbols"` Of datatype.StringSlice `json:"of" yaml:"of"` When datatype.StringSlice `json:"when" yaml:"when"` } -// ExchangeStrategyMount wraps the SingleExchangeStrategy with the Session name for mounting +// ExchangeStrategyMount wraps the SingleExchangeStrategy with the ExchangeSession name for mounting type ExchangeStrategyMount struct { - // 
Mounts contains the Session name to mount - Mounts []string + // Mounts contains the ExchangeSession name to mount + Mounts []string `json:"mounts"` // Strategy is the strategy we loaded from config - Strategy SingleExchangeStrategy + Strategy SingleExchangeStrategy `json:"strategy"` +} + +func (m *ExchangeStrategyMount) Map() (map[string]interface{}, error) { + strategyID := m.Strategy.ID() + + var params map[string]interface{} + + out, err := json.Marshal(m.Strategy) + if err != nil { + return nil, err + } + + if err := json.Unmarshal(out, &params); err != nil { + return nil, err + } + + return map[string]interface{}{ + "on": m.Mounts, + strategyID: params, + }, nil } type SlackNotification struct { @@ -36,61 +64,106 @@ type SlackNotification struct { ErrorChannel string `json:"errorChannel,omitempty" yaml:"errorChannel,omitempty"` } -type NotificationRouting struct { +type SlackNotificationRouting struct { Trade string `json:"trade,omitempty" yaml:"trade,omitempty"` Order string `json:"order,omitempty" yaml:"order,omitempty"` SubmitOrder string `json:"submitOrder,omitempty" yaml:"submitOrder,omitempty"` PnL string `json:"pnL,omitempty" yaml:"pnL,omitempty"` } +type TelegramNotification struct { + Broadcast bool `json:"broadcast" yaml:"broadcast"` +} + type NotificationConfig struct { Slack *SlackNotification `json:"slack,omitempty" yaml:"slack,omitempty"` + Telegram *TelegramNotification `json:"telegram,omitempty" yaml:"telegram,omitempty"` + SymbolChannels map[string]string `json:"symbolChannels,omitempty" yaml:"symbolChannels,omitempty"` SessionChannels map[string]string `json:"sessionChannels,omitempty" yaml:"sessionChannels,omitempty"` - Routing *NotificationRouting `json:"routing,omitempty" yaml:"routing,omitempty"` + Routing *SlackNotificationRouting `json:"routing,omitempty" yaml:"routing,omitempty"` } type Session struct { - ExchangeName string `json:"exchange" yaml:"exchange"` - EnvVarPrefix string `json:"envVarPrefix" yaml:"envVarPrefix"` + Name string `json:"name,omitempty" yaml:"name,omitempty"` + ExchangeName string `json:"exchange" yaml:"exchange"` + EnvVarPrefix string `json:"envVarPrefix" yaml:"envVarPrefix"` + + Key string `json:"key,omitempty" yaml:"key,omitempty"` + Secret string `json:"secret,omitempty" yaml:"secret,omitempty"` + PublicOnly bool `json:"publicOnly,omitempty" yaml:"publicOnly"` - Margin bool `json:"margin,omitempty" yaml:"margin"` + Margin bool `json:"margin,omitempty" yaml:"margin,omitempty"` IsolatedMargin bool `json:"isolatedMargin,omitempty" yaml:"isolatedMargin,omitempty"` IsolatedMarginSymbol string `json:"isolatedMarginSymbol,omitempty" yaml:"isolatedMarginSymbol,omitempty"` } type Backtest struct { - StartTime string `json:"startTime" yaml:"startTime"` - EndTime string `json:"endTime" yaml:"endTime"` + StartTime types.LooseFormatTime `json:"startTime,omitempty" yaml:"startTime,omitempty"` + EndTime *types.LooseFormatTime `json:"endTime,omitempty" yaml:"endTime,omitempty"` - Account BacktestAccount `json:"account" yaml:"account"` - Symbols []string `json:"symbols" yaml:"symbols"` -} + // RecordTrades is an option, if set to true, back-testing should record the trades into database + RecordTrades bool `json:"recordTrades,omitempty" yaml:"recordTrades,omitempty"` -func (t Backtest) ParseEndTime() (time.Time, error) { - if len(t.EndTime) == 0 { - return time.Time{}, errors.New("backtest.endTime must be defined") - } - 
return time.Parse("2006-01-02", t.EndTime) + Accounts map[string]BacktestAccount `json:"accounts" yaml:"accounts"` + Symbols []string `json:"symbols" yaml:"symbols"` + Sessions []string `json:"sessions" yaml:"sessions"` } -func (t Backtest) ParseStartTime() (time.Time, error) { - if len(t.StartTime) == 0 { - return time.Time{}, errors.New("backtest.startTime must be defined") +func (b *Backtest) GetAccount(n string) BacktestAccount { + accountConfig, ok := b.Accounts[n] + if ok { + return accountConfig + } + + accountConfig, ok = b.Account[n] + if ok { + return accountConfig } - return time.Parse("2006-01-02", t.StartTime) + return DefaultBacktestAccount } type BacktestAccount struct { - MakerCommission int `json:"makerCommission"` - TakerCommission int `json:"takerCommission"` - BuyerCommission int `json:"buyerCommission"` - SellerCommission int `json:"sellerCommission"` - Balances BacktestAccountBalanceMap `json:"balances" yaml:"balances"` + // TODO: MakerFeeRate should replace the commission fields + MakerFeeRate fixedpoint.Value `json:"makerFeeRate,omitempty" yaml:"makerFeeRate,omitempty"` + TakerFeeRate fixedpoint.Value `json:"takerFeeRate,omitempty" yaml:"takerFeeRate,omitempty"` + + Balances BacktestAccountBalanceMap `json:"balances" yaml:"balances"` +} + +var DefaultBacktestAccount = BacktestAccount{ + MakerFeeRate: fixedpoint.MustNewFromString("0.050%"), + TakerFeeRate: fixedpoint.MustNewFromString("0.075%"), + Balances: BacktestAccountBalanceMap{ + "USDT": fixedpoint.NewFromFloat(10000), + }, +} + +type BA BacktestAccount + +func (b *BacktestAccount) UnmarshalYAML(value *yaml.Node) error { + bb := &BA{MakerFeeRate: DefaultFeeRate, TakerFeeRate: DefaultFeeRate} + if err := value.Decode(bb); err != nil { + return err + } + *b = BacktestAccount(*bb) + return nil +} + +func (b *BacktestAccount) UnmarshalJSON(input []byte) error { + bb := &BA{MakerFeeRate: DefaultFeeRate, TakerFeeRate: DefaultFeeRate} + if err := json.Unmarshal(input, bb); err != nil { + return err + } + *b = BacktestAccount(*bb) + return nil } type BacktestAccountBalanceMap map[string]fixedpoint.Value @@ -101,41 +174,30 @@ func (m BacktestAccountBalanceMap) BalanceMap() types.BalanceMap { balances[currency] = types.Balance{ Currency: currency, Available: value, - Locked: 0, + Locked: fixedpoint.Zero, } } return balances } -type RedisPersistenceConfig struct { - Host string `json:"host" env:"REDIS_HOST"` - Port string `json:"port" env:"REDIS_PORT"` - Password string `json:"password" env:"REDIS_PASSWORD"` - DB int `json:"db" env:"REDIS_DB"` -} - -type JsonPersistenceConfig struct { - Directory string `json:"directory"` -} - type PersistenceConfig struct { - Redis *RedisPersistenceConfig `json:"redis,omitempty" yaml:"redis,omitempty"` - Json *JsonPersistenceConfig `json:"json,omitempty" yaml:"json,omitempty"` + Redis *service.RedisPersistenceConfig `json:"redis,omitempty" yaml:"redis,omitempty"` + Json *service.JsonPersistenceConfig `json:"json,omitempty" yaml:"json,omitempty"` } type BuildTargetConfig struct { Name string `json:"name" yaml:"name"` Arch string `json:"arch" yaml:"arch"` OS string `json:"os" yaml:"os"` - LDFlags datatype.StringSlice `json:"ldflags" yaml:"ldflags"` - GCFlags datatype.StringSlice `json:"gcflags" yaml:"gcflags"` - Imports []string `json:"imports" yaml:"imports"` + LDFlags datatype.StringSlice `json:"ldflags,omitempty" yaml:"ldflags,omitempty"` + GCFlags datatype.StringSlice `json:"gcflags,omitempty" yaml:"gcflags,omitempty"` + Imports []string `json:"imports,omitempty" 
yaml:"imports,omitempty"` } type BuildConfig struct { - BuildDir string `json:"buildDir" yaml:"buildDir"` - Imports []string `json:"imports" yaml:"imports"` - Targets []BuildTargetConfig `json:"targets" yaml:"targets"` + BuildDir string `json:"buildDir,omitempty" yaml:"buildDir,omitempty"` + Imports []string `json:"imports,omitempty" yaml:"imports,omitempty"` + Targets []BuildTargetConfig `json:"targets,omitempty" yaml:"targets,omitempty"` } func GetNativeBuildTargetConfig() BuildTargetConfig { @@ -146,29 +208,203 @@ func GetNativeBuildTargetConfig() BuildTargetConfig { } } +type SyncSymbol struct { + Symbol string `json:"symbol" yaml:"symbol"` + Session string `json:"session" yaml:"session"` +} + +func (ss *SyncSymbol) UnmarshalYAML(unmarshal func(a interface{}) error) (err error) { + var s string + if err = unmarshal(&s); err == nil { + aa := strings.SplitN(s, ":", 2) + if len(aa) > 1 { + ss.Session = aa[0] + ss.Symbol = aa[1] + } else { + ss.Symbol = aa[0] + } + return nil + } + + type localSyncSymbol SyncSymbol + var ssNew localSyncSymbol + if err = unmarshal(&ssNew); err == nil { + *ss = SyncSymbol(ssNew) + return nil + } + + return err +} + +func categorizeSyncSymbol(slice []SyncSymbol) (map[string][]string, []string) { + var rest []string + var m = make(map[string][]string) + for _, ss := range slice { + if len(ss.Session) > 0 { + m[ss.Session] = append(m[ss.Session], ss.Symbol) + } else { + rest = append(rest, ss.Symbol) + } + } + return m, rest +} + +type SyncConfig struct { + // Sessions to sync, if ignored, all defined sessions will sync + Sessions []string `json:"sessions,omitempty" yaml:"sessions,omitempty"` + + // Symbols is the list of session:symbol pair to sync, if ignored, symbols wlll be discovered by your existing crypto balances + // Valid formats are: {session}:{symbol}, {symbol} or in YAML object form {symbol: "BTCUSDT", session:"max" } + Symbols []SyncSymbol `json:"symbols,omitempty" yaml:"symbols,omitempty"` + + // DepositHistory is for syncing deposit history + DepositHistory bool `json:"depositHistory" yaml:"depositHistory"` + + // WithdrawHistory is for syncing withdraw history + WithdrawHistory bool `json:"withdrawHistory" yaml:"withdrawHistory"` + + // RewardHistory is for syncing reward history + RewardHistory bool `json:"rewardHistory" yaml:"rewardHistory"` + + // MarginHistory is for syncing margin related history: loans, repays, interests and liquidations + MarginHistory bool `json:"marginHistory" yaml:"marginHistory"` + + MarginAssets []string `json:"marginAssets" yaml:"marginAssets"` + + // Since is the date where you want to start syncing data + Since *types.LooseFormatTime `json:"since,omitempty"` + + // UserDataStream is for real-time sync with websocket user data stream + UserDataStream *struct { + Trades bool `json:"trades,omitempty" yaml:"trades,omitempty"` + FilledOrders bool `json:"filledOrders,omitempty" yaml:"filledOrders,omitempty"` + } `json:"userDataStream,omitempty" yaml:"userDataStream,omitempty"` +} + type Config struct { - Build *BuildConfig `json:"build" yaml:"build"` + Build *BuildConfig `json:"build,omitempty" yaml:"build,omitempty"` // Imports is deprecated // Deprecated: use BuildConfig instead - Imports []string `json:"imports" yaml:"imports"` + Imports []string `json:"imports,omitempty" yaml:"imports,omitempty"` Backtest *Backtest `json:"backtest,omitempty" yaml:"backtest,omitempty"` + Sync *SyncConfig `json:"sync,omitempty" yaml:"sync,omitempty"` + Notifications *NotificationConfig `json:"notifications,omitempty" 
yaml:"notifications,omitempty"` Persistence *PersistenceConfig `json:"persistence,omitempty" yaml:"persistence,omitempty"` - Sessions map[string]Session `json:"sessions,omitempty" yaml:"sessions,omitempty"` + Sessions map[string]*ExchangeSession `json:"sessions,omitempty" yaml:"sessions,omitempty"` RiskControls *RiskControls `json:"riskControls,omitempty" yaml:"riskControls,omitempty"` - ExchangeStrategies []ExchangeStrategyMount - CrossExchangeStrategies []CrossExchangeStrategy + ExchangeStrategies []ExchangeStrategyMount `json:"-" yaml:"-"` + CrossExchangeStrategies []CrossExchangeStrategy `json:"-" yaml:"-"` PnLReporters []PnLReporterConfig `json:"reportPnL,omitempty" yaml:"reportPnL,omitempty"` } +func (c *Config) Map() (map[string]interface{}, error) { + text, err := json.Marshal(c) + if err != nil { + return nil, err + } + + var data map[string]interface{} + err = json.Unmarshal(text, &data) + if err != nil { + return nil, err + } + + // convert strategy config back to the DSL format + var exchangeStrategies []map[string]interface{} + for _, m := range c.ExchangeStrategies { + params, err := m.Map() + if err != nil { + return nil, err + } + + exchangeStrategies = append(exchangeStrategies, params) + } + + if len(exchangeStrategies) > 0 { + data["exchangeStrategies"] = exchangeStrategies + } + + var crossExchangeStrategies []map[string]interface{} + for _, st := range c.CrossExchangeStrategies { + strategyID := st.ID() + + var params Stash + + out, err := json.Marshal(st) + if err != nil { + return nil, err + } + + if err := json.Unmarshal(out, ¶ms); err != nil { + return nil, err + } + + crossExchangeStrategies = append(crossExchangeStrategies, map[string]interface{}{ + strategyID: params, + }) + } + + if len(crossExchangeStrategies) > 0 { + data["crossExchangeStrategies"] = crossExchangeStrategies + } + + return data, err +} + +func (c *Config) YAML() ([]byte, error) { + m, err := c.Map() + if err != nil { + return nil, err + } + + var buf bytes.Buffer + var enc = yaml.NewEncoder(&buf) + enc.SetIndent(2) + err = enc.Encode(m) + return buf.Bytes(), err +} + +func (c *Config) GetSignature() string { + var s string + + var ps []string + + // for single exchange strategy + if len(c.ExchangeStrategies) == 1 && len(c.CrossExchangeStrategies) == 0 { + mount := c.ExchangeStrategies[0].Mounts[0] + ps = append(ps, mount) + + strategy := c.ExchangeStrategies[0].Strategy + + id := strategy.ID() + ps = append(ps, id) + + if symbol, ok := isSymbolBasedStrategy(reflect.ValueOf(strategy)); ok { + ps = append(ps, symbol) + } + } + + startTime := c.Backtest.StartTime.Time() + ps = append(ps, startTime.Format("2006-01-02")) + + if c.Backtest.EndTime != nil { + endTime := c.Backtest.EndTime.Time() + ps = append(ps, endTime.Format("2006-01-02")) + } + + s = strings.Join(ps, "_") + return s +} + type Stash map[string]interface{} func loadStash(config []byte) (Stash, error) { @@ -209,6 +445,7 @@ func LoadBuildConfig(configFile string) (*Config, error) { return &config, nil } +// Load parses the config func Load(configFile string, loadStrategies bool) (*Config, error) { var config Config @@ -288,10 +525,25 @@ func loadCrossExchangeStrategies(config *Config, stash Stash) (err error) { return nil } +func NewStrategyFromMap(id string, conf interface{}) (SingleExchangeStrategy, error) { + if st, ok := LoadedExchangeStrategies[id]; ok { + val, err := reUnmarshal(conf, st) + if err != nil { + return nil, err + } + return val.(SingleExchangeStrategy), nil + } + + return nil, fmt.Errorf("strategy %s not found", 
id) +} + func loadExchangeStrategies(config *Config, stash Stash) (err error) { exchangeStrategiesConf, ok := stash["exchangeStrategies"] if !ok { - return nil + exchangeStrategiesConf, ok = stash["strategies"] + if !ok { + return nil + } } if len(LoadedExchangeStrategies) == 0 { @@ -311,25 +563,44 @@ func loadExchangeStrategies(config *Config, stash Stash) (err error) { var mounts []string if val, ok := configStash["on"]; ok { - if values, ok := val.([]string); ok { - mounts = append(mounts, values...) - } else if str, ok := val.(string); ok { - mounts = append(mounts, str) + switch tv := val.(type) { + + case []string: + mounts = append(mounts, tv...) + + case string: + mounts = append(mounts, tv) + + case []interface{}: + for _, f := range tv { + s, ok := f.(string) + if !ok { + return fmt.Errorf("%+v (%T) is not a string", f, f) + } + + mounts = append(mounts, s) + } + + default: + return fmt.Errorf("unexpected mount type: %T value: %+v", val, val) } } - for id, conf := range configStash { + // look up the real struct type - if st, ok := LoadedExchangeStrategies[id]; ok { - val, err := reUnmarshal(conf, st) + if _, ok := LoadedExchangeStrategies[id]; ok { + st, err := NewStrategyFromMap(id, conf) if err != nil { return err } config.ExchangeStrategies = append(config.ExchangeStrategies, ExchangeStrategyMount{ Mounts: mounts, - Strategy: val.(SingleExchangeStrategy), + Strategy: st, }) + } else if id != "on" && id != "off" { + // Show error when we didn't find the Strategy + return fmt.Errorf("strategy %s in config not found", id) } } } diff --git a/pkg/bbgo/config_test.go b/pkg/bbgo/config_test.go index aea43969d9..7232904314 100644 --- a/pkg/bbgo/config_test.go +++ b/pkg/bbgo/config_test.go @@ -2,9 +2,13 @@ package bbgo import ( "context" + "io/ioutil" "testing" "github.com/stretchr/testify/assert" + "gopkg.in/yaml.v3" + + "github.com/c9s/bbgo/pkg/fixedpoint" ) func init() { @@ -12,11 +16,15 @@ func init() { } type TestStrategy struct { - Symbol string `json:"symbol"` - Interval string `json:"interval"` - BaseQuantity float64 `json:"baseQuantity"` - MaxAssetQuantity float64 `json:"maxAssetQuantity"` - MinDropPercentage float64 `json:"minDropPercentage"` + Symbol string `json:"symbol"` + Interval string `json:"interval"` + BaseQuantity fixedpoint.Value `json:"baseQuantity"` + MaxAssetQuantity fixedpoint.Value `json:"maxAssetQuantity"` + MinDropPercentage fixedpoint.Value `json:"minDropPercentage"` +} + +func (s *TestStrategy) ID() string { + return "test" } func (s *TestStrategy) Run(ctx context.Context, orderExecutor OrderExecutor, session *ExchangeSession) error { @@ -51,12 +59,87 @@ func TestLoadConfig(t *testing.T) { assert.Equal(t, "#error", config.Notifications.Slack.ErrorChannel) }, }, + { name: "strategy", args: args{configFile: "testdata/strategy.yaml"}, wantErr: false, f: func(t *testing.T, config *Config) { assert.Len(t, config.ExchangeStrategies, 1) + assert.Equal(t, []ExchangeStrategyMount{{ + Mounts: []string{"binance"}, + Strategy: &TestStrategy{ + Symbol: "BTCUSDT", + Interval: "1m", + BaseQuantity: fixedpoint.NewFromFloat(0.1), + MaxAssetQuantity: fixedpoint.NewFromFloat(1.1), + MinDropPercentage: fixedpoint.NewFromFloat(-0.05), + }, + }}, config.ExchangeStrategies) + + m, err := config.Map() + assert.NoError(t, err) + assert.Equal(t, map[string]interface{}{ + "sessions": map[string]interface{}{ + "max": map[string]interface{}{ + "exchange": "max", + "envVarPrefix": "MAX", + "takerFeeRate": 0., + "makerFeeRate": 0., + }, + "binance": map[string]interface{}{ + 
"exchange": "binance", + "envVarPrefix": "BINANCE", + "takerFeeRate": 0., + "makerFeeRate": 0., + }, + }, + "build": map[string]interface{}{ + "buildDir": "build", + "targets": []interface{}{ + map[string]interface{}{ + "name": "bbgow-amd64-darwin", + "arch": "amd64", + "os": "darwin", + }, + map[string]interface{}{ + "name": "bbgow-amd64-linux", + "arch": "amd64", + "os": "linux", + }, + }, + }, + "exchangeStrategies": []map[string]interface{}{ + { + "on": []string{"binance"}, + "test": map[string]interface{}{ + "symbol": "BTCUSDT", + "baseQuantity": 0.1, + "interval": "1m", + "maxAssetQuantity": 1.1, + "minDropPercentage": -0.05, + }, + }, + }, + }, m) + + yamlText, err := config.YAML() + assert.NoError(t, err) + + yamlTextSource, err := ioutil.ReadFile("testdata/strategy.yaml") + assert.NoError(t, err) + + var sourceMap map[string]interface{} + err = yaml.Unmarshal(yamlTextSource, &sourceMap) + assert.NoError(t, err) + delete(sourceMap, "build") + + var actualMap map[string]interface{} + err = yaml.Unmarshal(yamlText, &actualMap) + assert.NoError(t, err) + delete(actualMap, "build") + + assert.Equal(t, sourceMap, actualMap) }, }, @@ -105,9 +188,8 @@ func TestLoadConfig(t *testing.T) { assert.Len(t, config.ExchangeStrategies, 1) assert.NotNil(t, config.Backtest) assert.NotNil(t, config.Backtest.Account) - assert.NotNil(t, config.Backtest.Account.Balances) - assert.Len(t, config.Backtest.Account.Balances, 2) - assert.NotEmpty(t, config.Backtest.StartTime) + assert.NotNil(t, config.Backtest.Account["binance"].Balances) + assert.Len(t, config.Backtest.Account["binance"].Balances, 2) }, }, } @@ -132,5 +214,52 @@ func TestLoadConfig(t *testing.T) { } }) } +} + +func TestSyncSymbol(t *testing.T) { + t.Run("symbol", func(t *testing.T) { + var ss []SyncSymbol + var err = yaml.Unmarshal([]byte(`- BTCUSDT`), &ss) + assert.NoError(t, err) + assert.Equal(t, []SyncSymbol{ + {Symbol: "BTCUSDT"}, + }, ss) + }) + + t.Run("session:symbol", func(t *testing.T) { + var ss []SyncSymbol + var err = yaml.Unmarshal([]byte(`- max:BTCUSDT`), &ss) + assert.NoError(t, err) + assert.Equal(t, []SyncSymbol{ + {Session: "max", Symbol: "BTCUSDT"}, + }, ss) + }) + + t.Run("object", func(t *testing.T) { + var ss []SyncSymbol + var err = yaml.Unmarshal([]byte(`- { session: "max", symbol: "BTCUSDT" }`), &ss) + assert.NoError(t, err) + assert.Equal(t, []SyncSymbol{ + {Session: "max", Symbol: "BTCUSDT"}, + }, ss) + }) +} + +func Test_categorizeSyncSymbol(t *testing.T) { + var ss []SyncSymbol + var err = yaml.Unmarshal([]byte(` +- BTCUSDT +- ETHUSDT +- max:MAXUSDT +- max:USDTTWD +- binance:BNBUSDT +`), &ss) + assert.NoError(t, err) + assert.NotEmpty(t, ss) + sm, rest := categorizeSyncSymbol(ss) + assert.NotEmpty(t, rest) + assert.NotEmpty(t, sm) + assert.Equal(t, []string{"MAXUSDT", "USDTTWD"}, sm["max"]) + assert.Equal(t, []string{"BNBUSDT"}, sm["binance"]) } diff --git a/pkg/bbgo/context.go b/pkg/bbgo/context.go index 2613c0bc92..5c5a19364a 100644 --- a/pkg/bbgo/context.go +++ b/pkg/bbgo/context.go @@ -8,4 +8,3 @@ import ( type Context struct { sync.Mutex } - diff --git a/pkg/bbgo/db.go b/pkg/bbgo/db.go deleted file mode 100644 index 0ff4f0d139..0000000000 --- a/pkg/bbgo/db.go +++ /dev/null @@ -1,50 +0,0 @@ -package bbgo - -import ( - "context" - "database/sql" - - // register the go migrations - _ "github.com/c9s/bbgo/pkg/migrations" - - "github.com/c9s/rockhopper" - "github.com/go-sql-driver/mysql" - "github.com/jmoiron/sqlx" -) - -func ConnectMySQL(dsn string) (*sqlx.DB, error) { - config, err := mysql.ParseDSN(dsn) - if 
err != nil { - return nil, err - } - - config.ParseTime = true - dsn = config.FormatDSN() - return sqlx.Connect("mysql", dsn) -} - -func upgradeDB(ctx context.Context, driver string, db *sql.DB) error { - dialect, err := rockhopper.LoadDialect(driver) - if err != nil { - return err - } - - loader := &rockhopper.GoMigrationLoader{} - migrations, err := loader.Load() - if err != nil { - return err - } - - rh := rockhopper.New(driver, dialect, db) - - currentVersion, err := rh.CurrentVersion() - if err != nil { - return err - } - - if err := rockhopper.Up(ctx, rh, migrations, currentVersion, 0); err != nil { - return err - } - - return nil -} diff --git a/pkg/bbgo/environment.go b/pkg/bbgo/environment.go index f60ccb689d..9fc84e783a 100644 --- a/pkg/bbgo/environment.go +++ b/pkg/bbgo/environment.go @@ -1,24 +1,52 @@ package bbgo import ( + "bytes" "context" "fmt" + "image/png" + "io/ioutil" + stdlog "log" + "math/rand" "os" "strings" + "sync" "time" "github.com/codingconcepts/env" - "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "github.com/pquerna/otp" log "github.com/sirupsen/logrus" + "github.com/slack-go/slack" "github.com/spf13/viper" + "gopkg.in/tucnak/telebot.v2" - "github.com/c9s/bbgo/pkg/accounting/pnl" - "github.com/c9s/bbgo/pkg/cmd/cmdutil" + exchange2 "github.com/c9s/bbgo/pkg/exchange" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/interact" + "github.com/c9s/bbgo/pkg/notifier/slacknotifier" + "github.com/c9s/bbgo/pkg/notifier/telegramnotifier" "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/slack/slacklog" "github.com/c9s/bbgo/pkg/types" "github.com/c9s/bbgo/pkg/util" ) +func init() { + // randomize pulling + rand.Seed(time.Now().UnixNano()) +} + +// IsBackTesting is a global variable that indicates the current environment is back-test or not. +var IsBackTesting = false + +var BackTestService *service.BacktestService + +func SetBackTesting(s *service.BacktestService) { + BackTestService = s + IsBackTesting = true +} + var LoadedExchangeStrategies = make(map[string]SingleExchangeStrategy) var LoadedCrossExchangeStrategies = make(map[string]CrossExchangeStrategy) @@ -41,29 +69,53 @@ func RegisterStrategy(key string, s interface{}) { var emptyTime time.Time +type SyncStatus int + +const ( + SyncNotStarted SyncStatus = iota + Syncing + SyncDone +) + // Environment presents the real exchange data layer type Environment struct { - // Notifiability here for environment is for the streaming data notification - // note that, for back tests, we don't need notification. 
- Notifiability + DatabaseService *service.DatabaseService + OrderService *service.OrderService + TradeService *service.TradeService + ProfitService *service.ProfitService + PositionService *service.PositionService + BacktestService *service.BacktestService + RewardService *service.RewardService + MarginService *service.MarginService + SyncService *service.SyncService + AccountService *service.AccountService + WithdrawService *service.WithdrawService + DepositService *service.DepositService - PersistenceServiceFacade *PersistenceServiceFacade + // startTime is the time of start point (which is used in the backtest) + startTime time.Time - OrderService *service.OrderService - TradeService *service.TradeService - TradeSync *service.SyncService + // syncStartTime is the time point we want to start the sync (for trades and orders) + syncStartTime time.Time + syncMutex sync.Mutex - // startTime is the time of start point (which is used in the backtest) - startTime time.Time - tradeScanTime time.Time - sessions map[string]*ExchangeSession + syncStatusMutex sync.Mutex + syncStatus SyncStatus + syncConfig *SyncConfig + + sessions map[string]*ExchangeSession } func NewEnvironment() *Environment { + + now := time.Now() return &Environment{ // default trade scan time - tradeScanTime: time.Now().AddDate(0, 0, -7), // sync from 7 days ago + syncStartTime: now.AddDate(-1, 0, 0), // defaults to sync from 1 year ago sessions: make(map[string]*ExchangeSession), + startTime: now, + + syncStatus: SyncNotStarted, } } @@ -76,33 +128,74 @@ func (environ *Environment) Sessions() map[string]*ExchangeSession { return environ.sessions } -func (environ *Environment) ConfigureDatabase(ctx context.Context) error { - if viper.IsSet("mysql-url") { - dsn := viper.GetString("mysql-url") - db, err := ConnectMySQL(dsn) - if err != nil { - return err +func (environ *Environment) SelectSessions(names ...string) map[string]*ExchangeSession { + if len(names) == 0 { + return environ.sessions + } + + sessions := make(map[string]*ExchangeSession) + for _, name := range names { + if s, ok := environ.Session(name); ok { + sessions[name] = s } + } - if err := upgradeDB(ctx, "mysql", db.DB); err != nil { - return err + return sessions +} + +func (environ *Environment) ConfigureDatabase(ctx context.Context) error { + // configureDB configures the database service based on the environment variable + if driver, ok := os.LookupEnv("DB_DRIVER"); ok { + + if dsn, ok := os.LookupEnv("DB_DSN"); ok { + return environ.ConfigureDatabaseDriver(ctx, driver, dsn) } - environ.SetDB(db) + } else if dsn, ok := os.LookupEnv("SQLITE3_DSN"); ok { + + return environ.ConfigureDatabaseDriver(ctx, "sqlite3", dsn) + + } else if dsn, ok := os.LookupEnv("MYSQL_URL"); ok { + + return environ.ConfigureDatabaseDriver(ctx, "mysql", dsn) + } return nil } -func (environ *Environment) SetDB(db *sqlx.DB) *Environment { +func (environ *Environment) ConfigureDatabaseDriver(ctx context.Context, driver string, dsn string) error { + environ.DatabaseService = service.NewDatabaseService(driver, dsn) + err := environ.DatabaseService.Connect() + if err != nil { + return err + } + + if err := environ.DatabaseService.Upgrade(ctx); err != nil { + return err + } + + // get the db connection pool object to create other services + db := environ.DatabaseService.DB environ.OrderService = &service.OrderService{DB: db} environ.TradeService = &service.TradeService{DB: db} - environ.TradeSync = &service.SyncService{ - TradeService: environ.TradeService, - OrderService: 
environ.OrderService, + environ.RewardService = &service.RewardService{DB: db} + environ.AccountService = &service.AccountService{DB: db} + environ.ProfitService = &service.ProfitService{DB: db} + environ.PositionService = &service.PositionService{DB: db} + environ.MarginService = &service.MarginService{DB: db} + environ.WithdrawService = &service.WithdrawService{DB: db} + environ.DepositService = &service.DepositService{DB: db} + environ.SyncService = &service.SyncService{ + TradeService: environ.TradeService, + OrderService: environ.OrderService, + RewardService: environ.RewardService, + MarginService: environ.MarginService, + WithdrawService: &service.WithdrawService{DB: db}, + DepositService: &service.DepositService{DB: db}, } - return environ + return nil } // AddExchangeSession adds the existing exchange session or pre-created exchange session @@ -117,7 +210,8 @@ func (environ *Environment) AddExchange(name string, exchange types.Exchange) (s return environ.AddExchangeSession(name, session) } -func (environ *Environment) AddExchangesFromConfig(userConfig *Config) error { +func (environ *Environment) ConfigureExchangeSessions(userConfig *Config) error { + // if sessions are not defined, we detect the sessions automatically if len(userConfig.Sessions) == 0 { return environ.AddExchangesByViperKeys() } @@ -126,9 +220,9 @@ func (environ *Environment) AddExchangesFromConfig(userConfig *Config) error { } func (environ *Environment) AddExchangesByViperKeys() error { - for _, n := range SupportedExchanges { + for _, n := range types.SupportedExchanges { if viper.IsSet(string(n) + "-api-key") { - exchange, err := cmdutil.NewExchangeWithEnvVarPrefix(n, "") + exchange, err := exchange2.NewWithEnvVarPrefix(n, "") if err != nil { return err } @@ -140,218 +234,56 @@ func (environ *Environment) AddExchangesByViperKeys() error { return nil } -func (environ *Environment) AddExchangesFromSessionConfig(sessions map[string]Session) error { - for sessionName, sessionConfig := range sessions { - exchangeName, err := types.ValidExchangeName(sessionConfig.ExchangeName) - if err != nil { - return err - } - - exchange, err := cmdutil.NewExchangeWithEnvVarPrefix(exchangeName, sessionConfig.EnvVarPrefix) - if err != nil { +func (environ *Environment) AddExchangesFromSessionConfig(sessions map[string]*ExchangeSession) error { + for sessionName, session := range sessions { + if err := session.InitExchange(sessionName, nil); err != nil { return err } - // configure exchange - if sessionConfig.Margin { - marginExchange, ok := exchange.(types.MarginExchange) - if !ok { - return fmt.Errorf("exchange %s does not support margin", exchangeName) - } - - if sessionConfig.IsolatedMargin { - marginExchange.UseIsolatedMargin(sessionConfig.IsolatedMarginSymbol) - } else { - marginExchange.UseMargin() - } - } - - session := NewExchangeSession(sessionName, exchange) - session.IsMargin = sessionConfig.Margin - session.IsIsolatedMargin = sessionConfig.IsolatedMargin - session.IsolatedMarginSymbol = sessionConfig.IsolatedMarginSymbol environ.AddExchangeSession(sessionName, session) } return nil } +func (environ *Environment) IsBackTesting() bool { + return environ.BacktestService != nil +} + // Init prepares the data that will be used by the strategies func (environ *Environment) Init(ctx context.Context) (err error) { for n := range environ.sessions { var session = environ.sessions[n] - var markets, err = LoadExchangeMarketsWithCache(ctx, session.Exchange) - - if len(markets) == 0 { - return fmt.Errorf("market config should not be 
empty") - } - - session.markets = markets - - // trade sync and market data store depends on subscribed symbols so we have to do this here. - for symbol := range session.loadedSymbols { - market, ok := markets[symbol] - if !ok { - return fmt.Errorf("market %s is not defined", symbol) - } - - var trades []types.Trade - if environ.TradeSync != nil { - log.Infof("syncing trades from %s for symbol %s...", session.Exchange.Name(), symbol) - if err := environ.TradeSync.SyncTrades(ctx, session.Exchange, symbol, environ.tradeScanTime); err != nil { - return err - } - - tradingFeeCurrency := session.Exchange.PlatformFeeCurrency() - if strings.HasPrefix(symbol, tradingFeeCurrency) { - trades, err = environ.TradeService.QueryForTradingFeeCurrency(session.Exchange.Name(), symbol, tradingFeeCurrency) - } else { - trades, err = environ.TradeService.Query(session.Exchange.Name(), symbol) - } - - if err != nil { - return err - } - - log.Infof("symbol %s: %d trades loaded", symbol, len(trades)) - } - - session.Trades[symbol] = &types.TradeSlice{Trades: trades} - session.Stream.OnTradeUpdate(func(trade types.Trade) { - session.Trades[symbol].Append(trade) - }) - - session.lastPrices[symbol] = 0.0 - - position := &Position{ - Symbol: symbol, - BaseCurrency: market.BaseCurrency, - QuoteCurrency: market.QuoteCurrency, - } - position.AddTrades(trades) - position.BindStream(session.Stream) - session.positions[symbol] = position - - orderStore := NewOrderStore(symbol) - orderStore.BindStream(session.Stream) - session.orderStores[symbol] = orderStore - - marketDataStore := NewMarketDataStore(symbol) - marketDataStore.BindStream(session.Stream) - session.marketDataStores[symbol] = marketDataStore - - standardIndicatorSet := NewStandardIndicatorSet(symbol, marketDataStore) - session.standardIndicatorSets[symbol] = standardIndicatorSet - } - - log.Infof("querying balances from session %s...", session.Name) - balances, err := session.Exchange.QueryAccountBalances(ctx) - if err != nil { - return err - } - - log.Infof("%s account", session.Name) - balances.Print() - - session.Account.UpdateBalances(balances) - session.Account.BindStream(session.Stream) - - session.Stream.OnBalanceUpdate(func(balances types.BalanceMap) { - log.Infof("balance update: %+v", balances) - }) - - // update last prices - session.Stream.OnKLineClosed(func(kline types.KLine) { - log.Infof("kline closed: %+v", kline) - - if _, ok := session.startPrices[kline.Symbol]; !ok { - session.startPrices[kline.Symbol] = kline.Open - } - - session.lastPrices[kline.Symbol] = kline.Close - }) - - // feed klines into the market data store - if environ.startTime == emptyTime { - environ.startTime = time.Now() - } - - var intervals = map[types.Interval]struct{}{} - for _, sub := range session.Subscriptions { - if sub.Channel == types.KLineChannel { - intervals[types.Interval(sub.Options.Interval)] = struct{}{} + if err = session.Init(ctx, environ); err != nil { + // we can skip initialized sessions + if err != ErrSessionAlreadyInitialized { + return err } } + } - for symbol := range session.loadedSymbols { - marketDataStore, ok := session.marketDataStores[symbol] - if !ok { - return fmt.Errorf("symbol %s is not defined", symbol) - } - - var lastPriceTime time.Time - for interval := range intervals { - // avoid querying the last unclosed kline - endTime := environ.startTime.Add(- interval.Duration()) - kLines, err := session.Exchange.QueryKLines(ctx, symbol, interval, types.KLineQueryOptions{ - EndTime: &endTime, - Limit: 1000, // indicators need at least 100 - }) 
- if err != nil { - return err - } - - if len(kLines) == 0 { - log.Warnf("no kline data for interval %s (end time <= %s)", interval, environ.startTime) - continue - } - - // update last prices by the given kline - lastKLine := kLines[len(kLines)-1] - log.Infof("last kline: %+v", lastKLine) - if lastPriceTime == emptyTime { - session.lastPrices[symbol] = lastKLine.Close - lastPriceTime = lastKLine.EndTime - } else if lastKLine.EndTime.After(lastPriceTime) { - session.lastPrices[symbol] = lastKLine.Close - lastPriceTime = lastKLine.EndTime - } - - for _, k := range kLines { - // let market data store trigger the update, so that the indicator could be updated too. - marketDataStore.AddKLine(k) - } - } - - log.Infof("last price: %f", session.lastPrices[symbol]) - } + return +} - if environ.TradeService != nil { - session.Stream.OnTradeUpdate(func(trade types.Trade) { - if err := environ.TradeService.Insert(trade); err != nil { - log.WithError(err).Errorf("trade insert error: %+v", trade) - } - }) +// Start initializes the symbols data streams +func (environ *Environment) Start(ctx context.Context) (err error) { + for n := range environ.sessions { + var session = environ.sessions[n] + if err = session.InitSymbols(ctx, environ); err != nil { + return err } - - // TODO: move market data store dispatch to here, use one callback to dispatch the market data - // Session.Stream.OnKLineClosed(func(kline types.KLine) { }) } - - return nil + return } func (environ *Environment) ConfigurePersistence(conf *PersistenceConfig) error { - var facade = &PersistenceServiceFacade{ - Memory: NewMemoryService(), - } - if conf.Redis != nil { if err := env.Set(conf.Redis); err != nil { return err } - facade.Redis = NewRedisPersistenceService(conf.Redis) + redisPersistence := service.NewRedisPersistenceService(conf.Redis) + PersistenceServiceFacade.Redis = redisPersistence } if conf.Json != nil { @@ -362,25 +294,24 @@ func (environ *Environment) ConfigurePersistence(conf *PersistenceConfig) error } } - facade.Json = &JsonPersistenceService{Directory: conf.Json.Directory} + jsonPersistence := &service.JsonPersistenceService{Directory: conf.Json.Directory} + PersistenceServiceFacade.Json = jsonPersistence } - environ.PersistenceServiceFacade = facade return nil } -// configure notification rules +// ConfigureNotificationRouting configures the notification rules // for symbol-based routes, we should register the same symbol rules for each session. 
// for session-based routes, we should set the fixed callbacks for each session -func (environ *Environment) ConfigureNotification(conf *NotificationConfig) error { +func (environ *Environment) ConfigureNotificationRouting(conf *NotificationConfig) error { // configure routing here if conf.SymbolChannels != nil { - environ.SymbolChannelRouter.AddRoute(conf.SymbolChannels) + Notification.SymbolChannelRouter.AddRoute(conf.SymbolChannels) } if conf.SessionChannels != nil { - environ.SessionChannelRouter.AddRoute(conf.SessionChannels) + Notification.SessionChannelRouter.AddRoute(conf.SessionChannels) } - if conf.Routing != nil { // configure passive object notification routing switch conf.Routing.Trade { @@ -388,47 +319,44 @@ func (environ *Environment) ConfigureNotification(conf *NotificationConfig) erro case "$session": defaultTradeUpdateHandler := func(trade types.Trade) { - text := util.Render(TemplateTradeReport, trade) - environ.Notify(text, &trade) + Notify(&trade) } for name := range environ.sessions { session := environ.sessions[name] // if we can route session name to channel successfully... - channel, ok := environ.SessionChannelRouter.Route(name) + channel, ok := Notification.SessionChannelRouter.Route(name) if ok { - session.Stream.OnTradeUpdate(func(trade types.Trade) { - text := util.Render(TemplateTradeReport, trade) - environ.NotifyTo(channel, text, &trade) + session.UserDataStream.OnTradeUpdate(func(trade types.Trade) { + Notification.NotifyTo(channel, &trade) }) } else { - session.Stream.OnTradeUpdate(defaultTradeUpdateHandler) + session.UserDataStream.OnTradeUpdate(defaultTradeUpdateHandler) } } case "$symbol": // configure object routes for Trade - environ.ObjectChannelRouter.Route(func(obj interface{}) (channel string, ok bool) { + Notification.ObjectChannelRouter.Route(func(obj interface{}) (channel string, ok bool) { trade, matched := obj.(*types.Trade) if !matched { return } - channel, ok = environ.SymbolChannelRouter.Route(trade.Symbol) + channel, ok = Notification.SymbolChannelRouter.Route(trade.Symbol) return }) // use same handler for each session handler := func(trade types.Trade) { - text := util.Render(TemplateTradeReport, trade) - channel, ok := environ.RouteObject(&trade) + channel, ok := Notification.RouteObject(&trade) if ok { - environ.NotifyTo(channel, text, &trade) + NotifyTo(channel, &trade) } else { - environ.Notify(text, &trade) + Notify(&trade) } } for _, session := range environ.sessions { - session.Stream.OnTradeUpdate(handler) + session.UserDataStream.OnTradeUpdate(handler) } } @@ -439,46 +367,46 @@ func (environ *Environment) ConfigureNotification(conf *NotificationConfig) erro case "$session": defaultOrderUpdateHandler := func(order types.Order) { text := util.Render(TemplateOrderReport, order) - environ.Notify(text, &order) + Notify(text, &order) } for name := range environ.sessions { session := environ.sessions[name] // if we can route session name to channel successfully... 
- channel, ok := environ.SessionChannelRouter.Route(name) + channel, ok := Notification.SessionChannelRouter.Route(name) if ok { - session.Stream.OnOrderUpdate(func(order types.Order) { + session.UserDataStream.OnOrderUpdate(func(order types.Order) { text := util.Render(TemplateOrderReport, order) - environ.NotifyTo(channel, text, &order) + NotifyTo(channel, text, &order) }) } else { - session.Stream.OnOrderUpdate(defaultOrderUpdateHandler) + session.UserDataStream.OnOrderUpdate(defaultOrderUpdateHandler) } } case "$symbol": // add object route - environ.ObjectChannelRouter.Route(func(obj interface{}) (channel string, ok bool) { + Notification.ObjectChannelRouter.Route(func(obj interface{}) (channel string, ok bool) { order, matched := obj.(*types.Order) if !matched { return } - channel, ok = environ.SymbolChannelRouter.Route(order.Symbol) + channel, ok = Notification.SymbolChannelRouter.Route(order.Symbol) return }) // use same handler for each session handler := func(order types.Order) { text := util.Render(TemplateOrderReport, order) - channel, ok := environ.RouteObject(&order) + channel, ok := Notification.RouteObject(&order) if ok { - environ.NotifyTo(channel, text, &order) + NotifyTo(channel, text, &order) } else { - environ.Notify(text, &order) + Notify(text, &order) } } for _, session := range environ.sessions { - session.Stream.OnOrderUpdate(handler) + session.UserDataStream.OnOrderUpdate(handler) } } @@ -488,30 +416,33 @@ func (environ *Environment) ConfigureNotification(conf *NotificationConfig) erro case "$symbol": // add object route - environ.ObjectChannelRouter.Route(func(obj interface{}) (channel string, ok bool) { + Notification.ObjectChannelRouter.Route(func(obj interface{}) (channel string, ok bool) { order, matched := obj.(*types.SubmitOrder) if !matched { return } - channel, ok = environ.SymbolChannelRouter.Route(order.Symbol) + channel, ok = Notification.SymbolChannelRouter.Route(order.Symbol) return }) } - // currently not used - switch conf.Routing.PnL { - case "$symbol": - environ.ObjectChannelRouter.Route(func(obj interface{}) (channel string, ok bool) { - report, matched := obj.(*pnl.AverageCostPnlReport) - if !matched { + // currently, not used + // FIXME: this is causing cyclic import + /* + switch conf.Routing.PnL { + case "$symbol": + environ.ObjectChannelRouter.Route(func(obj interface{}) (channel string, ok bool) { + report, matched := obj.(*pnl.AverageCostPnlReport) + if !matched { + return + } + channel, ok = environ.SymbolChannelRouter.Route(report.Symbol) return - } - channel, ok = environ.SymbolChannelRouter.Route(report.Symbol) - return - }) - } + }) + } + */ } return nil @@ -522,13 +453,90 @@ func (environ *Environment) SetStartTime(t time.Time) *Environment { return environ } -// SyncTradesFrom overrides the default trade scan time (-7 days) -func (environ *Environment) SyncTradesFrom(t time.Time) *Environment { - environ.tradeScanTime = t +// SetSyncStartTime overrides the default trade scan time (-7 days) +func (environ *Environment) SetSyncStartTime(t time.Time) *Environment { + environ.syncStartTime = t return environ } +func (environ *Environment) BindSync(config *SyncConfig) { + // skip this if we are running back-test + if environ.BacktestService != nil { + return + } + + // If trade service is configured, we have the db configured + if environ.TradeService == nil { + return + } + + if config == nil || config.UserDataStream == nil { + return + } + + environ.syncConfig = config + + tradeWriterCreator := func(session *ExchangeSession) 
func(trade types.Trade) { + return func(trade types.Trade) { + trade.IsMargin = session.Margin + trade.IsFutures = session.Futures + if session.Margin { + trade.IsIsolated = session.IsolatedMargin + } else if session.Futures { + trade.IsIsolated = session.IsolatedFutures + } + + // The StrategyID field and the PnL field needs to be updated by the strategy. + // trade.StrategyID, trade.PnL + if err := environ.TradeService.Insert(trade); err != nil { + log.WithError(err).Errorf("trade insert error: %+v", trade) + } + } + } + + orderWriterCreator := func(session *ExchangeSession) func(order types.Order) { + return func(order types.Order) { + order.IsMargin = session.Margin + order.IsFutures = session.Futures + if session.Margin { + order.IsIsolated = session.IsolatedMargin + } else if session.Futures { + order.IsIsolated = session.IsolatedFutures + } + + switch order.Status { + case types.OrderStatusFilled, types.OrderStatusCanceled: + if order.ExecutedQuantity.Sign() > 0 { + if err := environ.OrderService.Insert(order); err != nil { + log.WithError(err).Errorf("order insert error: %+v", order) + } + } + } + } + } + + for _, session := range environ.sessions { + // avoid using the iterator variable. + s2 := session + // if trade sync is on, we will write all received trades + if config.UserDataStream.Trades { + tradeWriter := tradeWriterCreator(s2) + session.UserDataStream.OnTradeUpdate(tradeWriter) + } + + if config.UserDataStream.FilledOrders { + orderWriter := orderWriterCreator(s2) + session.UserDataStream.OnOrderUpdate(orderWriter) + } + } +} + func (environ *Environment) Connect(ctx context.Context) error { + log.Debugf("starting interaction...") + if err := interact.Start(ctx); err != nil { + return err + } + for n := range environ.sessions { // avoid using the placeholder variable for the session because we use that in the callbacks var session = environ.sessions[n] @@ -540,12 +548,261 @@ func (environ *Environment) Connect(ctx context.Context) error { // add the subscribe requests to the stream for _, s := range session.Subscriptions { logger.Infof("subscribing %s %s %v", s.Symbol, s.Channel, s.Options) - session.Stream.Subscribe(s.Channel, s.Symbol, s.Options) + session.MarketDataStream.Subscribe(s.Channel, s.Symbol, s.Options) + } + } + + logger.Infof("connecting %s market data stream...", session.Name) + if err := session.MarketDataStream.Connect(ctx); err != nil { + return err + } + + if !session.PublicOnly { + logger.Infof("connecting %s user data stream...", session.Name) + if err := session.UserDataStream.Connect(ctx); err != nil { + return err } } + } + + return nil +} + +func (environ *Environment) IsSyncing() (status SyncStatus) { + environ.syncStatusMutex.Lock() + status = environ.syncStatus + environ.syncStatusMutex.Unlock() + return status +} + +func (environ *Environment) setSyncing(status SyncStatus) { + environ.syncStatusMutex.Lock() + environ.syncStatus = status + environ.syncStatusMutex.Unlock() +} + +func (environ *Environment) syncWithUserConfig(ctx context.Context, userConfig *Config) error { + sessions := environ.sessions + selectedSessions := userConfig.Sync.Sessions + if len(selectedSessions) > 0 { + sessions = environ.SelectSessions(selectedSessions...) 
+ } + + since := time.Now().AddDate(0, -6, 0) + if userConfig.Sync.Since != nil { + since = userConfig.Sync.Since.Time() + } + + syncSymbolMap, restSymbols := categorizeSyncSymbol(userConfig.Sync.Symbols) + for _, session := range sessions { + syncSymbols := restSymbols + if ss, ok := syncSymbolMap[session.Name]; ok { + syncSymbols = append(syncSymbols, ss...) + } + + if err := environ.syncSession(ctx, session, syncSymbols...); err != nil { + return err + } + + if userConfig.Sync.DepositHistory { + if err := environ.SyncService.SyncDepositHistory(ctx, session.Exchange, since); err != nil { + return err + } + } + + if userConfig.Sync.WithdrawHistory { + if err := environ.SyncService.SyncWithdrawHistory(ctx, session.Exchange, since); err != nil { + return err + } + } + + if userConfig.Sync.RewardHistory { + if err := environ.SyncService.SyncRewardHistory(ctx, session.Exchange, since); err != nil { + return err + } + } + + if userConfig.Sync.MarginHistory { + if err := environ.SyncService.SyncMarginHistory(ctx, session.Exchange, + since, + userConfig.Sync.MarginAssets...); err != nil { + return err + } + } + } + + return nil +} + +// Sync syncs all registered exchange sessions +func (environ *Environment) Sync(ctx context.Context, userConfig ...*Config) error { + if environ.SyncService == nil { + return nil + } + + // for paper trade mode, skip sync + if util.IsPaperTrade() { + return nil + } + + environ.syncMutex.Lock() + defer environ.syncMutex.Unlock() + + environ.setSyncing(Syncing) + defer environ.setSyncing(SyncDone) + + // sync by the defined user config + if len(userConfig) > 0 && userConfig[0] != nil && userConfig[0].Sync != nil { + return environ.syncWithUserConfig(ctx, userConfig[0]) + } + + // the default sync logics + for _, session := range environ.sessions { + if err := environ.syncSession(ctx, session); err != nil { + return err + } + } + + return nil +} + +func (environ *Environment) RecordAsset(t time.Time, session *ExchangeSession, assets types.AssetMap) { + // skip for back-test + if environ.BacktestService != nil { + return + } + + if environ.DatabaseService == nil || environ.AccountService == nil { + return + } + + if err := environ.AccountService.InsertAsset( + t, + session.Name, + session.ExchangeName, + session.SubAccount, + session.Margin, + session.IsolatedMargin, + session.IsolatedMarginSymbol, + assets); err != nil { + log.WithError(err).Errorf("can not insert asset record") + } +} + +func (environ *Environment) RecordPosition(position *types.Position, trade types.Trade, profit *types.Profit) { + // skip for back-test + if environ.BacktestService != nil { + return + } + + if environ.DatabaseService == nil || environ.ProfitService == nil || environ.PositionService == nil { + return + } + + // set profit info to position + if profit != nil { + if position.Strategy == "" && profit.Strategy != "" { + position.Strategy = profit.Strategy + } + + if position.StrategyInstanceID == "" && profit.StrategyInstanceID != "" { + position.StrategyInstanceID = profit.StrategyInstanceID + } + } + + if profit != nil { + if err := environ.PositionService.Insert(position, trade, profit.Profit); err != nil { + log.WithError(err).Errorf("can not insert position record") + } + if err := environ.ProfitService.Insert(*profit); err != nil { + log.WithError(err).Errorf("can not insert profit record: %+v", profit) + } + } else { + if err := environ.PositionService.Insert(position, trade, fixedpoint.Zero); err != nil { + log.WithError(err).Errorf("can not insert position record") + } + } +} + 
+func (environ *Environment) RecordProfit(profit types.Profit) { + // skip for back-test + if environ.BacktestService != nil { + return + } + + if environ.DatabaseService == nil { + return + } + if environ.ProfitService == nil { + return + } + + if err := environ.ProfitService.Insert(profit); err != nil { + log.WithError(err).Errorf("can not insert profit record: %+v", profit) + } +} + +func (environ *Environment) SyncSession(ctx context.Context, session *ExchangeSession, defaultSymbols ...string) error { + if environ.SyncService == nil { + return nil + } + + environ.syncMutex.Lock() + defer environ.syncMutex.Unlock() + + environ.setSyncing(Syncing) + defer environ.setSyncing(SyncDone) + + return environ.syncSession(ctx, session, defaultSymbols...) +} + +func (environ *Environment) syncSession(ctx context.Context, session *ExchangeSession, defaultSymbols ...string) error { + symbols, err := session.getSessionSymbols(defaultSymbols...) + if err != nil { + return err + } + + log.Infof("syncing symbols %v from session %s", symbols, session.Name) + + return environ.SyncService.SyncSessionSymbols(ctx, session.Exchange, environ.syncStartTime, symbols...) +} + +func (environ *Environment) ConfigureNotificationSystem(userConfig *Config) error { + + // setup default notification config + if userConfig.Notifications == nil { + userConfig.Notifications = &NotificationConfig{ + Routing: &SlackNotificationRouting{ + Trade: "$session", + Order: "$silent", + SubmitOrder: "$silent", + }, + } + } + + var persistence = PersistenceServiceFacade.Get() + + err := environ.setupInteraction(persistence) + if err != nil { + return err + } + + // setup slack + slackToken := viper.GetString("slack-token") + if len(slackToken) > 0 && userConfig.Notifications != nil { + environ.setupSlack(userConfig, slackToken, persistence) + } + + // check if telegram bot token is defined + telegramBotToken := viper.GetString("telegram-bot-token") + if len(telegramBotToken) > 0 { + if err := environ.setupTelegram(userConfig, telegramBotToken, persistence); err != nil { + return err + } + } - logger.Infof("connecting session %s...", session.Name) - if err := session.Stream.Connect(ctx); err != nil { + if userConfig.Notifications != nil { + if err := environ.ConfigureNotificationRouting(userConfig.Notifications); err != nil { return err } } @@ -553,9 +810,317 @@ func (environ *Environment) Connect(ctx context.Context) error { return nil } -func LoadExchangeMarketsWithCache(ctx context.Context, ex types.Exchange) (markets types.MarketMap, err error) { - err = WithCache(fmt.Sprintf("%s-markets", ex.Name()), &markets, func() (interface{}, error) { - return ex.QueryMarkets(ctx) +// getAuthStoreID returns the authentication store id +// if telegram bot token is defined, the bot id will be used. +// if not, env var $USER will be used. +// if both are not defined, a default "default" will be used. 
+func getAuthStoreID() string { + telegramBotToken := viper.GetString("telegram-bot-token") + if len(telegramBotToken) > 0 { + tt := strings.Split(telegramBotToken, ":") + return tt[0] + } + + userEnv := os.Getenv("USER") + if userEnv != "" { + return userEnv + } + + return "default" +} + +func (environ *Environment) setupInteraction(persistence service.PersistenceService) error { + var otpQRCodeImagePath = "otp.png" + var key *otp.Key + var keyURL string + var authStore = environ.getAuthStore(persistence) + + if v, ok := util.GetEnvVarBool("FLUSH_OTP_KEY"); v && ok { + log.Warnf("flushing otp key...") + if err := authStore.Reset(); err != nil { + return err + } + } + + if err := authStore.Load(&keyURL); err != nil { + log.Warnf("telegram session not found, generating new one-time password key for new telegram session...") + + newKey, err := setupNewOTPKey(otpQRCodeImagePath) + if err != nil { + return errors.Wrapf(err, "failed to setup totp (time-based one time password) key") + } + + key = newKey + keyURL = key.URL() + if err := authStore.Save(keyURL); err != nil { + return err + } + + printOtpAuthGuide(otpQRCodeImagePath) + + } else if keyURL != "" { + key, err = otp.NewKeyFromURL(keyURL) + if err != nil { + log.WithError(err).Errorf("can not load otp key from url: %s, generating new otp key", keyURL) + + newKey, err := setupNewOTPKey(otpQRCodeImagePath) + if err != nil { + return errors.Wrapf(err, "failed to setup totp (time-based one time password) key") + } + + key = newKey + keyURL = key.URL() + if err := authStore.Save(keyURL); err != nil { + return err + } + + printOtpAuthGuide(otpQRCodeImagePath) + } else { + log.Infof("otp key loaded: %s", util.MaskKey(key.Secret())) + printOtpAuthGuide(otpQRCodeImagePath) + } + } + + authStrict := false + authMode := interact.AuthModeToken + authToken := viper.GetString("telegram-bot-auth-token") + + if authToken != "" && key != nil { + authStrict = true + } else if authToken != "" { + authMode = interact.AuthModeToken + } else if key != nil { + authMode = interact.AuthModeOTP + } + + if authMode == interact.AuthModeToken { + log.Debugf("found interaction auth token, using token mode for authorization...") + printAuthTokenGuide(authToken) + } + + interact.AddCustomInteraction(&interact.AuthInteract{ + Strict: authStrict, + Mode: authMode, + Token: authToken, // can be empty string here + // pragma: allowlist nextline secret + OneTimePasswordKey: key, // can be nil here + }) + return nil +} + +func (environ *Environment) getAuthStore(persistence service.PersistenceService) service.Store { + id := getAuthStoreID() + return persistence.NewStore("bbgo", "auth", id) +} + +func (environ *Environment) setupSlack(userConfig *Config, slackToken string, persistence service.PersistenceService) { + conf := userConfig.Notifications.Slack + if conf == nil { + return + } + + if !strings.HasPrefix(slackToken, "xoxb-") { + log.Error("SLACK_BOT_TOKEN must have the prefix \"xoxb-\".") + return + } + + // app-level token (for specific api) + slackAppToken := viper.GetString("slack-app-token") + if !strings.HasPrefix(slackAppToken, "xapp-") { + log.Errorf("SLACK_APP_TOKEN must have the prefix \"xapp-\".") + return + } + + if conf.ErrorChannel != "" { + log.Debugf("found slack configured, setting up log hook...") + log.AddHook(slacklog.NewLogHook(slackToken, conf.ErrorChannel)) + } + + log.Debugf("adding slack notifier with default channel: %s", conf.DefaultChannel) + + var slackOpts = []slack.Option{ + slack.OptionLog(stdlog.New(os.Stdout, "api: ", 
stdlog.Lshortfile|stdlog.LstdFlags)), + slack.OptionAppLevelToken(slackAppToken), + } + + if b, ok := util.GetEnvVarBool("DEBUG_SLACK"); ok { + slackOpts = append(slackOpts, slack.OptionDebug(b)) + } + + var client = slack.New(slackToken, slackOpts...) + + var notifier = slacknotifier.New(client, conf.DefaultChannel) + Notification.AddNotifier(notifier) + + // allocate a store, so that we can save the chatID for the owner + var messenger = interact.NewSlack(client) + + var sessions = interact.SlackSessionMap{} + var sessionStore = persistence.NewStore("bbgo", "slack") + if err := sessionStore.Load(&sessions); err != nil { + + } else { + // TODO: this is not necessary for slack, but we should find a way to restore the sessions + /* + for _, session := range sessions { + if session.IsAuthorized() { + // notifier.AddChat(session.Chat) + } + } + messenger.RestoreSessions(sessions) + messenger.OnAuthorized(func(userSession *interact.SlackSession) { + if userSession.IsAuthorized() { + // notifier.AddChat(userSession.Chat) + } + }) + */ + } + + interact.AddMessenger(messenger) +} + +func (environ *Environment) setupTelegram(userConfig *Config, telegramBotToken string, persistence service.PersistenceService) error { + tt := strings.Split(telegramBotToken, ":") + telegramID := tt[0] + + bot, err := telebot.NewBot(telebot.Settings{ + // You can also set custom API URL. + // If field is empty it equals to "https://api.telegram.org". + // URL: "http://195.129.111.17:8012", + Token: telegramBotToken, + Poller: &telebot.LongPoller{Timeout: 10 * time.Second}, }) - return markets, err + + if err != nil { + return err + } + + var opts []telegramnotifier.Option + if userConfig.Notifications != nil && userConfig.Notifications.Telegram != nil { + log.Infof("telegram broadcast is enabled") + opts = append(opts, telegramnotifier.UseBroadcast()) + } + + var notifier = telegramnotifier.New(bot, opts...) 
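+	// register the notifier with the global Notification hub so that Notify() / NotifyTo() calls are forwarded to telegram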
+ Notification.AddNotifier(notifier) + + // allocate a store, so that we can save the chatID for the owner + var messenger = interact.NewTelegram(bot) + + var sessions = interact.TelegramSessionMap{} + var sessionStore = persistence.NewStore("bbgo", "telegram", telegramID) + if err := sessionStore.Load(&sessions); err != nil { + if err != service.ErrPersistenceNotExists { + log.WithError(err).Errorf("unexpected persistence error") + } + } else { + for _, session := range sessions { + if session.IsAuthorized() { + notifier.AddChat(session.Chat) + } + } + + // you must restore the session after the notifier updates + messenger.RestoreSessions(sessions) + } + + messenger.OnAuthorized(func(userSession *interact.TelegramSession) { + if userSession.IsAuthorized() { + notifier.AddChat(userSession.Chat) + } + + log.Infof("user session %d got authorized, saving telegram sessions...", userSession.User.ID) + if err := sessionStore.Save(messenger.Sessions()); err != nil { + log.WithError(err).Errorf("telegram session save error") + } + }) + + interact.AddMessenger(messenger) + return nil +} + +func writeOTPKeyAsQRCodePNG(key *otp.Key, imagePath string) error { + // Convert TOTP key into a PNG + var buf bytes.Buffer + img, err := key.Image(512, 512) + if err != nil { + return err + } + + if err := png.Encode(&buf, img); err != nil { + return err + } + + if err := ioutil.WriteFile(imagePath, buf.Bytes(), 0644); err != nil { + return err + } + + return nil +} + +// setupNewOTPKey generates a new otp key and save the secret as a qrcode image +func setupNewOTPKey(qrcodeImagePath string) (*otp.Key, error) { + key, err := service.NewDefaultTotpKey() + if err != nil { + return nil, errors.Wrapf(err, "failed to setup totp (time-based one time password) key") + } + + printOtpKey(key) + + if err := writeOTPKeyAsQRCodePNG(key, qrcodeImagePath); err != nil { + return nil, err + } + + return key, nil +} + +func printOtpKey(key *otp.Key) { + fmt.Println("") + fmt.Println("====================================================================") + fmt.Println(" PLEASE STORE YOUR OTP KEY SAFELY ") + fmt.Println("====================================================================") + fmt.Printf(" Issuer: %s\n", key.Issuer()) + fmt.Printf(" AccountName: %s\n", key.AccountName()) + fmt.Printf(" Secret: %s\n", key.Secret()) + fmt.Printf(" Key URL: %s\n", key.URL()) + fmt.Println("====================================================================") + fmt.Println("") +} + +func printOtpAuthGuide(qrcodeImagePath string) { + fmt.Printf(` +To scan your OTP QR code, please run the following command: + + open %s + +For telegram, send the auth command with the generated one-time password to the bbo bot you created to enable the notification: + + /auth + +`, qrcodeImagePath) +} + +func printAuthTokenGuide(token string) { + fmt.Printf(` +For telegram, send the following command to the bbgo bot you created to enable the notification: + + /auth + +And then enter your token + + %s + +`, token) +} + +func (session *ExchangeSession) getSessionSymbols(defaultSymbols ...string) ([]string, error) { + if session.IsolatedMargin { + return []string{session.IsolatedMarginSymbol}, nil + } + + if len(defaultSymbols) > 0 { + return defaultSymbols, nil + } + + return session.FindPossibleSymbols() } diff --git a/pkg/bbgo/errors.go b/pkg/bbgo/errors.go new file mode 100644 index 0000000000..cb0518be0a --- /dev/null +++ b/pkg/bbgo/errors.go @@ -0,0 +1,5 @@ +package bbgo + +import "errors" + +var ErrSessionAlreadyInitialized = errors.New("session is 
already initialized") diff --git a/pkg/bbgo/graceful_shutdown.go b/pkg/bbgo/graceful_shutdown.go new file mode 100644 index 0000000000..b35482ce27 --- /dev/null +++ b/pkg/bbgo/graceful_shutdown.go @@ -0,0 +1,20 @@ +package bbgo + +import ( + "context" + "sync" +) + +//go:generate callbackgen -type Graceful +type Graceful struct { + shutdownCallbacks []func(ctx context.Context, wg *sync.WaitGroup) +} + +func (g *Graceful) Shutdown(ctx context.Context) { + var wg sync.WaitGroup + wg.Add(len(g.shutdownCallbacks)) + + go g.EmitShutdown(ctx, &wg) + + wg.Wait() +} diff --git a/pkg/bbgo/injection.go b/pkg/bbgo/injection.go index 25dd14100e..0db8cf2289 100644 --- a/pkg/bbgo/injection.go +++ b/pkg/bbgo/injection.go @@ -7,32 +7,12 @@ import ( "github.com/sirupsen/logrus" ) -func isSymbolBasedStrategy(rs reflect.Value) (string, bool) { - field := rs.FieldByName("Symbol") - if !field.IsValid() { - return "", false - } - - if field.Kind() != reflect.String { - return "", false - } - - return field.String(), true -} - -func hasField(rs reflect.Value, fieldName string) (field reflect.Value, ok bool) { - field = rs.FieldByName(fieldName) - return field, field.IsValid() -} - func injectField(rs reflect.Value, fieldName string, obj interface{}, pointerOnly bool) error { field := rs.FieldByName(fieldName) if !field.IsValid() { return nil } - logrus.Infof("found %s in %s, injecting %T...", fieldName, rs.Type(), obj) - if !field.CanSet() { return fmt.Errorf("field %s of %s can not be set", fieldName, rs.Type()) } @@ -57,3 +37,87 @@ func injectField(rs reflect.Value, fieldName string, obj interface{}, pointerOnl return nil } + +// parseStructAndInject parses the struct fields and injects the objects into the corresponding fields by its type. +// if the given object is a reference of an object, the type of the target field MUST BE a pointer field. +// if the given object is a struct value, the type of the target field CAN BE a pointer field or a struct value field. 
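+// A minimal illustrative sketch (types are from this package, the field names are hypothetical):
+//
+//	s := &struct {
+//		Env     *Environment
+//		Session *ExchangeSession
+//	}{}
+//	_ = parseStructAndInject(s, &Environment{}, &ExchangeSession{})
+//
+// both exported fields are filled by matching their types against the given objects.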
+func parseStructAndInject(f interface{}, objects ...interface{}) error { + sv := reflect.ValueOf(f) + st := reflect.TypeOf(f) + + if st.Kind() != reflect.Ptr { + return fmt.Errorf("f needs to be a pointer of a struct, %s given", st) + } + + // solve the reference + st = st.Elem() + sv = sv.Elem() + + if st.Kind() != reflect.Struct { + return fmt.Errorf("f needs to be a struct, %s given", st) + } + + for i := 0; i < sv.NumField(); i++ { + fv := sv.Field(i) + ft := fv.Type() + + // skip unexported fields + if !st.Field(i).IsExported() { + continue + } + + fieldName := st.Field(i).Name + + switch k := fv.Kind(); k { + + case reflect.Ptr, reflect.Struct: + for oi := 0; oi < len(objects); oi++ { + obj := objects[oi] + if obj == nil { + continue + } + + ot := reflect.TypeOf(obj) + if ft.AssignableTo(ot) { + if !fv.CanSet() { + return fmt.Errorf("field %v of %s can not be set to %s, make sure it is an exported field", fv, sv.Type(), ot) + } + + if k == reflect.Ptr && !fv.IsNil() { + logrus.Debugf("[injection] field %s is not nil, not injecting", fieldName) + continue + } + + if k == reflect.Ptr && ot.Kind() == reflect.Struct { + logrus.Debugf("[injection] found ptr + struct, injecting field %s to %T", fieldName, obj) + fv.Set(reflect.ValueOf(obj).Addr()) + } else { + logrus.Debugf("[injection] injecting field %s to %T", fieldName, obj) + fv.Set(reflect.ValueOf(obj)) + } + } + } + + case reflect.Interface: + for oi := 0; oi < len(objects); oi++ { + obj := objects[oi] + if obj == nil { + continue + } + + objT := reflect.TypeOf(obj) + logrus.Debugln( + ft.PkgPath(), + ft.Name(), + objT, "implements", ft, "=", objT.Implements(ft), + ) + + if objT.Implements(ft) { + fv.Set(reflect.ValueOf(obj)) + } + } + } + } + + return nil +} diff --git a/pkg/bbgo/injection_test.go b/pkg/bbgo/injection_test.go new file mode 100644 index 0000000000..dd63703207 --- /dev/null +++ b/pkg/bbgo/injection_test.go @@ -0,0 +1,105 @@ +package bbgo + +import ( + "reflect" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/types" +) + +func Test_injectField(t *testing.T) { + type TT struct { + TradeService *service.TradeService + } + + // only pointer object can be set. + var tt = &TT{} + + // get the value of the pointer, or it can not be set. 
+ var rv = reflect.ValueOf(tt).Elem() + + _, ret := hasField(rv, "TradeService") + assert.True(t, ret) + + ts := &service.TradeService{} + + err := injectField(rv, "TradeService", ts, true) + assert.NoError(t, err) +} + +func Test_parseStructAndInject(t *testing.T) { + t.Run("skip nil", func(t *testing.T) { + ss := struct { + a int + Env *Environment + }{ + a: 1, + Env: nil, + } + err := parseStructAndInject(&ss, nil) + assert.NoError(t, err) + assert.Nil(t, ss.Env) + }) + t.Run("pointer", func(t *testing.T) { + ss := struct { + a int + Env *Environment + }{ + a: 1, + Env: nil, + } + err := parseStructAndInject(&ss, &Environment{}) + assert.NoError(t, err) + assert.NotNil(t, ss.Env) + }) + + t.Run("composition", func(t *testing.T) { + type TT struct { + *service.TradeService + } + ss := TT{} + err := parseStructAndInject(&ss, &service.TradeService{}) + assert.NoError(t, err) + assert.NotNil(t, ss.TradeService) + }) + + t.Run("struct", func(t *testing.T) { + ss := struct { + a int + Env Environment + }{ + a: 1, + } + err := parseStructAndInject(&ss, Environment{ + startTime: time.Now(), + }) + assert.NoError(t, err) + assert.NotEqual(t, time.Time{}, ss.Env.startTime) + }) + t.Run("interface/any", func(t *testing.T) { + ss := struct { + Any interface{} // anything + }{ + Any: nil, + } + err := parseStructAndInject(&ss, &Environment{ + startTime: time.Now(), + }) + assert.NoError(t, err) + assert.NotNil(t, ss.Any) + }) + t.Run("interface/stringer", func(t *testing.T) { + ss := struct { + Stringer types.Stringer // stringer interface + }{ + Stringer: nil, + } + err := parseStructAndInject(&ss, &types.Trade{}) + assert.NoError(t, err) + assert.NotNil(t, ss.Stringer) + }) +} diff --git a/pkg/bbgo/interact.go b/pkg/bbgo/interact.go new file mode 100644 index 0000000000..9d333f9136 --- /dev/null +++ b/pkg/bbgo/interact.go @@ -0,0 +1,448 @@ +package bbgo + +import ( + "context" + "fmt" + "path" + "reflect" + "strconv" + "strings" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/interact" + "github.com/c9s/bbgo/pkg/types" +) + +type PositionCloser interface { + ClosePosition(ctx context.Context, percentage fixedpoint.Value) error +} + +type PositionReader interface { + CurrentPosition() *types.Position +} + +type closePositionContext struct { + signature string + closer PositionCloser + percentage fixedpoint.Value +} + +type CoreInteraction struct { + environment *Environment + trader *Trader + + exchangeStrategies map[string]SingleExchangeStrategy + closePositionContext closePositionContext +} + +func NewCoreInteraction(environment *Environment, trader *Trader) *CoreInteraction { + return &CoreInteraction{ + environment: environment, + trader: trader, + exchangeStrategies: make(map[string]SingleExchangeStrategy), + } +} + +func getStrategySignatures(exchangeStrategies map[string]SingleExchangeStrategy) []string { + var strategies []string + for signature := range exchangeStrategies { + strategies = append(strategies, signature) + } + + return strategies +} + +func filterStrategyByInterface(checkInterface interface{}, exchangeStrategies map[string]SingleExchangeStrategy) (strategies map[string]SingleExchangeStrategy, found bool) { + found = false + strategies = make(map[string]SingleExchangeStrategy) + rt := reflect.TypeOf(checkInterface).Elem() + for signature, strategy := range exchangeStrategies { + if ok := reflect.TypeOf(strategy).Implements(rt); ok { + strategies[signature] = strategy + found = true + } + } + + return strategies, found +} + +func 
generateStrategyButtonsForm(strategies map[string]SingleExchangeStrategy) [][3]string { + var buttonsForm [][3]string + signatures := getStrategySignatures(strategies) + for _, signature := range signatures { + buttonsForm = append(buttonsForm, [3]string{signature, "strategy", signature}) + } + + return buttonsForm +} + +func (it *CoreInteraction) Commands(i *interact.Interact) { + i.PrivateCommand("/sessions", "List Exchange Sessions", func(reply interact.Reply) error { + switch r := reply.(type) { + case *interact.SlackReply: + // call slack specific api to build the reply object + _ = r + } + + message := "Your connected sessions:\n" + for name, session := range it.environment.Sessions() { + message += "- " + name + " (" + session.ExchangeName.String() + ")\n" + } + + reply.Message(message) + return nil + }) + + i.PrivateCommand("/balances", "Show balances", func(reply interact.Reply) error { + reply.Message("Please select an exchange session") + for name := range it.environment.Sessions() { + reply.AddButton(name, "session", name) + } + return nil + }).Next(func(sessionName string, reply interact.Reply) error { + session, ok := it.environment.Session(sessionName) + if !ok { + reply.Message(fmt.Sprintf("Session %s not found", sessionName)) + return fmt.Errorf("session %s not found", sessionName) + } + + message := "Your balances\n" + balances := session.GetAccount().Balances() + for _, balance := range balances { + if balance.Total().IsZero() { + continue + } + + message += "- " + balance.String() + "\n" + } + + reply.Message(message) + return nil + }) + + i.PrivateCommand("/position", "Show Position", func(reply interact.Reply) error { + // it.trader.exchangeStrategies + // send symbol options + if strategies, found := filterStrategyByInterface((*PositionReader)(nil), it.exchangeStrategies); found { + reply.AddMultipleButtons(generateStrategyButtonsForm(strategies)) + reply.Message("Please choose one strategy") + } else { + reply.Message("No strategy supports PositionReader") + } + return nil + }).Cycle(func(signature string, reply interact.Reply) error { + strategy, ok := it.exchangeStrategies[signature] + if !ok { + reply.Message("Strategy not found") + return fmt.Errorf("strategy %s not found", signature) + } + + reader, implemented := strategy.(PositionReader) + if !implemented { + reply.Message(fmt.Sprintf("Strategy %s does not support position close", signature)) + return fmt.Errorf("strategy %s does not implement PositionCloser interface", signature) + } + + position := reader.CurrentPosition() + if position != nil { + reply.Send("Your current position:") + reply.Send(position.PlainText()) + + if position.Base.IsZero() { + reply.Message(fmt.Sprintf("Strategy %q has no opened position", signature)) + return fmt.Errorf("strategy %T has no opened position", strategy) + } + } + + if kc, ok := reply.(interact.KeyboardController); ok { + kc.RemoveKeyboard() + } + + return nil + }) + + i.PrivateCommand("/closeposition", "Close position", func(reply interact.Reply) error { + // it.trader.exchangeStrategies + // send symbol options + if strategies, found := filterStrategyByInterface((*PositionCloser)(nil), it.exchangeStrategies); found { + reply.AddMultipleButtons(generateStrategyButtonsForm(strategies)) + reply.Message("Please choose one strategy") + } else { + reply.Message("No strategy supports PositionCloser") + } + return nil + }).Next(func(signature string, reply interact.Reply) error { + strategy, ok := it.exchangeStrategies[signature] + if !ok { + reply.Message("Strategy not 
found") + return fmt.Errorf("strategy %s not found", signature) + } + + closer, implemented := strategy.(PositionCloser) + if !implemented { + reply.Message(fmt.Sprintf("Strategy %s does not support position close", signature)) + return fmt.Errorf("strategy %s does not implement PositionCloser interface", signature) + } + + it.closePositionContext.closer = closer + it.closePositionContext.signature = signature + + if reader, implemented := strategy.(PositionReader); implemented { + position := reader.CurrentPosition() + if position != nil { + reply.Send("Your current position:") + reply.Send(position.PlainText()) + + if position.Base.IsZero() { + reply.Message("No opened position") + if kc, ok := reply.(interact.KeyboardController); ok { + kc.RemoveKeyboard() + } + return fmt.Errorf("no opened position") + } + } + } + + reply.Message("Choose or enter the percentage to close") + for _, p := range []string{"5%", "25%", "50%", "80%", "100%"} { + reply.AddButton(p, "percentage", p) + } + + return nil + }).Next(func(percentageStr string, reply interact.Reply) error { + percentage, err := fixedpoint.NewFromString(percentageStr) + if err != nil { + reply.Message(fmt.Sprintf("%q is not a valid percentage string", percentageStr)) + return err + } + + if kc, ok := reply.(interact.KeyboardController); ok { + kc.RemoveKeyboard() + } + + err = it.closePositionContext.closer.ClosePosition(context.Background(), percentage) + if err != nil { + reply.Message(fmt.Sprintf("Failed to close the position, %s", err.Error())) + return err + } + + reply.Message("Done") + return nil + }) + + i.PrivateCommand("/status", "Strategy Status", func(reply interact.Reply) error { + // it.trader.exchangeStrategies + // send symbol options + if strategies, found := filterStrategyByInterface((*StrategyStatusReader)(nil), it.exchangeStrategies); found { + reply.AddMultipleButtons(generateStrategyButtonsForm(strategies)) + reply.Message("Please choose a strategy") + } else { + reply.Message("No strategy supports StrategyStatusReader") + } + return nil + }).Next(func(signature string, reply interact.Reply) error { + strategy, ok := it.exchangeStrategies[signature] + if !ok { + reply.Message("Strategy not found") + return fmt.Errorf("strategy %s not found", signature) + } + + controller, implemented := strategy.(StrategyStatusReader) + if !implemented { + reply.Message(fmt.Sprintf("Strategy %s does not support StrategyStatusReader", signature)) + return fmt.Errorf("strategy %s does not implement StrategyStatusReader", signature) + } + + status := controller.GetStatus() + + if kc, ok := reply.(interact.KeyboardController); ok { + kc.RemoveKeyboard() + } + + if status == types.StrategyStatusRunning { + reply.Message(fmt.Sprintf("Strategy %s is running.", signature)) + } else if status == types.StrategyStatusStopped { + reply.Message(fmt.Sprintf("Strategy %s is not running.", signature)) + } + + return nil + }) + + i.PrivateCommand("/suspend", "Suspend Strategy", func(reply interact.Reply) error { + // it.trader.exchangeStrategies + // send symbol options + if strategies, found := filterStrategyByInterface((*StrategyToggler)(nil), it.exchangeStrategies); found { + reply.AddMultipleButtons(generateStrategyButtonsForm(strategies)) + reply.Message("Please choose one strategy") + } else { + reply.Message("No strategy supports StrategyToggler") + } + return nil + }).Next(func(signature string, reply interact.Reply) error { + strategy, ok := it.exchangeStrategies[signature] + if !ok { + reply.Message("Strategy not found") + return 
fmt.Errorf("strategy %s not found", signature) + } + + controller, implemented := strategy.(StrategyToggler) + if !implemented { + reply.Message(fmt.Sprintf("Strategy %s does not support StrategyToggler", signature)) + return fmt.Errorf("strategy %s does not implement StrategyToggler", signature) + } + + // Check strategy status before suspend + if controller.GetStatus() != types.StrategyStatusRunning { + reply.Message(fmt.Sprintf("Strategy %s is not running.", signature)) + return nil + } + + if kc, ok := reply.(interact.KeyboardController); ok { + kc.RemoveKeyboard() + } + + if err := controller.Suspend(); err != nil { + reply.Message(fmt.Sprintf("Failed to suspend the strategy, %s", err.Error())) + return err + } + + reply.Message(fmt.Sprintf("Strategy %s suspended.", signature)) + return nil + }) + + i.PrivateCommand("/resume", "Resume Strategy", func(reply interact.Reply) error { + // it.trader.exchangeStrategies + // send symbol options + if strategies, found := filterStrategyByInterface((*StrategyToggler)(nil), it.exchangeStrategies); found { + reply.AddMultipleButtons(generateStrategyButtonsForm(strategies)) + reply.Message("Please choose one strategy") + } else { + reply.Message("No strategy supports StrategyToggler") + } + return nil + }).Next(func(signature string, reply interact.Reply) error { + strategy, ok := it.exchangeStrategies[signature] + if !ok { + reply.Message("Strategy not found") + return fmt.Errorf("strategy %s not found", signature) + } + + controller, implemented := strategy.(StrategyToggler) + if !implemented { + reply.Message(fmt.Sprintf("Strategy %s does not support StrategyToggler", signature)) + return fmt.Errorf("strategy %s does not implement StrategyToggler", signature) + } + + // Check strategy status before suspend + if controller.GetStatus() != types.StrategyStatusStopped { + reply.Message(fmt.Sprintf("Strategy %s is running.", signature)) + return nil + } + + if kc, ok := reply.(interact.KeyboardController); ok { + kc.RemoveKeyboard() + } + + if err := controller.Resume(); err != nil { + reply.Message(fmt.Sprintf("Failed to resume the strategy, %s", err.Error())) + return err + } + + reply.Message(fmt.Sprintf("Strategy %s resumed.", signature)) + return nil + }) + + i.PrivateCommand("/emergencystop", "Emergency Stop", func(reply interact.Reply) error { + // it.trader.exchangeStrategies + // send symbol options + if strategies, found := filterStrategyByInterface((*EmergencyStopper)(nil), it.exchangeStrategies); found { + reply.AddMultipleButtons(generateStrategyButtonsForm(strategies)) + reply.Message("Please choose one strategy") + } else { + reply.Message("No strategy supports EmergencyStopper") + } + return nil + }).Next(func(signature string, reply interact.Reply) error { + strategy, ok := it.exchangeStrategies[signature] + if !ok { + reply.Message("Strategy not found") + return fmt.Errorf("strategy %s not found", signature) + } + + controller, implemented := strategy.(EmergencyStopper) + if !implemented { + reply.Message(fmt.Sprintf("Strategy %s does not support EmergencyStopper", signature)) + return fmt.Errorf("strategy %s does not implement EmergencyStopper", signature) + } + + if kc, ok := reply.(interact.KeyboardController); ok { + kc.RemoveKeyboard() + } + + if err := controller.EmergencyStop(); err != nil { + reply.Message(fmt.Sprintf("Failed to emergency stop the strategy, %s", err.Error())) + return err + } + + reply.Message(fmt.Sprintf("Strategy %s stopped and the position closed.", signature)) + return nil + }) +} + +func (it 
*CoreInteraction) Initialize() error { + // re-map exchange strategies into the signature-object map + for sessionID, strategies := range it.trader.exchangeStrategies { + for _, strategy := range strategies { + signature, err := getStrategySignature(strategy) + if err != nil { + return err + } + + key := sessionID + "." + signature + it.exchangeStrategies[key] = strategy + } + } + return nil +} + +// getStrategySignature returns strategy instance unique signature +func getStrategySignature(strategy SingleExchangeStrategy) (string, error) { + // Returns instance ID + var signature = callID(strategy) + if signature != "" { + return signature, nil + } + + // Use reflect to build instance signature + rv := reflect.ValueOf(strategy).Elem() + if rv.Kind() != reflect.Struct { + return "", fmt.Errorf("strategy %T instance is not a struct", strategy) + } + + signature = path.Base(rv.Type().PkgPath()) + for i := 0; i < rv.NumField(); i++ { + field := rv.Field(i) + fieldName := rv.Type().Field(i).Name + if field.Kind() == reflect.String && fieldName != "Status" { + str := field.String() + if len(str) > 0 { + signature += "." + field.String() + } + } + } + + return signature, nil +} + +func parseFloatPercent(s string, bitSize int) (f float64, err error) { + i := strings.Index(s, "%") + if i < 0 { + return strconv.ParseFloat(s, bitSize) + } + + f, err = strconv.ParseFloat(s[:i], bitSize) + if err != nil { + return 0, err + } + return f / 100.0, nil +} diff --git a/pkg/bbgo/interact_test.go b/pkg/bbgo/interact_test.go new file mode 100644 index 0000000000..21d56f1628 --- /dev/null +++ b/pkg/bbgo/interact_test.go @@ -0,0 +1,33 @@ +package bbgo + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +type myStrategy struct { + Symbol string `json:"symbol"` +} + +func (m myStrategy) ID() string { + return "mystrategy" +} + +func (m myStrategy) InstanceID() string { + return fmt.Sprintf("%s:%s", m.ID(), m.Symbol) +} + +func (m *myStrategy) Run(ctx context.Context, orderExecutor OrderExecutor, session *ExchangeSession) error { + return nil +} + +func Test_getStrategySignature(t *testing.T) { + signature, err := getStrategySignature(&myStrategy{ + Symbol: "BTCUSDT", + }) + assert.NoError(t, err) + assert.Equal(t, "mystrategy:BTCUSDT", signature) +} diff --git a/pkg/bbgo/localactiveorderbook_callbacks.go b/pkg/bbgo/localactiveorderbook_callbacks.go deleted file mode 100644 index cd58cb5d29..0000000000 --- a/pkg/bbgo/localactiveorderbook_callbacks.go +++ /dev/null @@ -1,17 +0,0 @@ -// Code generated by "callbackgen -type LocalActiveOrderBook"; DO NOT EDIT. 
- -package bbgo - -import ( - "github.com/c9s/bbgo/pkg/types" -) - -func (b *LocalActiveOrderBook) OnFilled(cb func(o types.Order)) { - b.filledCallbacks = append(b.filledCallbacks, cb) -} - -func (b *LocalActiveOrderBook) EmitFilled(o types.Order) { - for _, cb := range b.filledCallbacks { - cb(o) - } -} diff --git a/pkg/bbgo/marketdatastore.go b/pkg/bbgo/marketdatastore.go index 801bc6d903..03f17d1f24 100644 --- a/pkg/bbgo/marketdatastore.go +++ b/pkg/bbgo/marketdatastore.go @@ -2,70 +2,41 @@ package bbgo import "github.com/c9s/bbgo/pkg/types" +const MaxNumOfKLines = 5_000 +const MaxNumOfKLinesTruncate = 100 + // MarketDataStore receives and maintain the public market data //go:generate callbackgen -type MarketDataStore type MarketDataStore struct { Symbol string // KLineWindows stores all loaded klines per interval - KLineWindows map[types.Interval]types.KLineWindow `json:"-"` - - kLineWindowUpdateCallbacks []func(interval types.Interval, kline types.KLineWindow) - - orderBook *types.StreamOrderBook + KLineWindows map[types.Interval]*types.KLineWindow `json:"-"` - orderBookUpdateCallbacks []func(orderBook *types.StreamOrderBook) + kLineWindowUpdateCallbacks []func(interval types.Interval, klines types.KLineWindow) } func NewMarketDataStore(symbol string) *MarketDataStore { return &MarketDataStore{ Symbol: symbol, - orderBook: types.NewStreamBook(symbol), - // KLineWindows stores all loaded klines per interval - KLineWindows: make(map[types.Interval]types.KLineWindow, len(types.SupportedIntervals)), // 12 interval, 1m,5m,15m,30m,1h,2h,4h,6h,12h,1d,3d,1w + KLineWindows: make(map[types.Interval]*types.KLineWindow, len(types.SupportedIntervals)), // 12 interval, 1m,5m,15m,30m,1h,2h,4h,6h,12h,1d,3d,1w } } -func (store *MarketDataStore) SetKLineWindows(windows map[types.Interval]types.KLineWindow) { +func (store *MarketDataStore) SetKLineWindows(windows map[types.Interval]*types.KLineWindow) { store.KLineWindows = windows } -func (store *MarketDataStore) OrderBook() types.OrderBook { - return store.orderBook.Copy() -} - // KLinesOfInterval returns the kline window of the given interval -func (store *MarketDataStore) KLinesOfInterval(interval types.Interval) (kLines types.KLineWindow, ok bool) { +func (store *MarketDataStore) KLinesOfInterval(interval types.Interval) (kLines *types.KLineWindow, ok bool) { kLines, ok = store.KLineWindows[interval] return kLines, ok } -func (store *MarketDataStore) handleOrderBookUpdate(book types.OrderBook) { - if book.Symbol != store.Symbol { - return - } - - store.orderBook.Update(book) - - store.EmitOrderBookUpdate(store.orderBook) -} - -func (store *MarketDataStore) handleOrderBookSnapshot(book types.OrderBook) { - if book.Symbol != store.Symbol { - return - } - - store.orderBook.Load(book) -} - func (store *MarketDataStore) BindStream(stream types.Stream) { stream.OnKLineClosed(store.handleKLineClosed) - stream.OnBookSnapshot(store.handleOrderBookSnapshot) - stream.OnBookUpdate(store.handleOrderBookUpdate) - - store.orderBook.BindStream(stream) } func (store *MarketDataStore) handleKLineClosed(kline types.KLine) { @@ -79,10 +50,15 @@ func (store *MarketDataStore) handleKLineClosed(kline types.KLine) { func (store *MarketDataStore) AddKLine(kline types.KLine) { window, ok := store.KLineWindows[kline.Interval] if !ok { - window = types.KLineWindow{kline} - } else { - window.Add(kline) + var tmp = make(types.KLineWindow, 0, 1000) + store.KLineWindows[kline.Interval] = &tmp + window = &tmp } - store.KLineWindows[kline.Interval] = window - 
store.EmitKLineWindowUpdate(kline.Interval, window) + window.Add(kline) + + if len(*window) > MaxNumOfKLines { + *window = (*window)[MaxNumOfKLinesTruncate-1:] + } + + store.EmitKLineWindowUpdate(kline.Interval, *window) } diff --git a/pkg/bbgo/marketdatastore_callbacks.go b/pkg/bbgo/marketdatastore_callbacks.go index 1c9d0d5dc9..4acaccb103 100644 --- a/pkg/bbgo/marketdatastore_callbacks.go +++ b/pkg/bbgo/marketdatastore_callbacks.go @@ -6,22 +6,12 @@ import ( "github.com/c9s/bbgo/pkg/types" ) -func (store *MarketDataStore) OnKLineWindowUpdate(cb func(interval types.Interval, kline types.KLineWindow)) { +func (store *MarketDataStore) OnKLineWindowUpdate(cb func(interval types.Interval, klines types.KLineWindow)) { store.kLineWindowUpdateCallbacks = append(store.kLineWindowUpdateCallbacks, cb) } -func (store *MarketDataStore) EmitKLineWindowUpdate(interval types.Interval, kline types.KLineWindow) { +func (store *MarketDataStore) EmitKLineWindowUpdate(interval types.Interval, klines types.KLineWindow) { for _, cb := range store.kLineWindowUpdateCallbacks { - cb(interval, kline) - } -} - -func (store *MarketDataStore) OnOrderBookUpdate(cb func(orderBook *types.StreamOrderBook)) { - store.orderBookUpdateCallbacks = append(store.orderBookUpdateCallbacks, cb) -} - -func (store *MarketDataStore) EmitOrderBookUpdate(orderBook *types.StreamOrderBook) { - for _, cb := range store.orderBookUpdateCallbacks { - cb(orderBook) + cb(interval, klines) } } diff --git a/pkg/bbgo/metrics.go b/pkg/bbgo/metrics.go new file mode 100644 index 0000000000..998ee71a90 --- /dev/null +++ b/pkg/bbgo/metrics.go @@ -0,0 +1,112 @@ +package bbgo + +import "github.com/prometheus/client_golang/prometheus" + +var ( + metricsConnectionStatus = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "bbgo_connection_status", + Help: "bbgo exchange session connection status", + }, + []string{ + "exchange", // exchange name + "channel", // channel: user or market + "margin", // margin type: none, margin or isolated + "symbol", // margin symbol of the connection. + }, + ) + + metricsLockedBalances = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "bbgo_balances_locked", + Help: "bbgo exchange locked balances", + }, + []string{ + "exchange", // exchange name + "margin", // margin of connection. 1 or 0 + "symbol", // margin symbol of the connection. + "currency", + }, + ) + + metricsAvailableBalances = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "bbgo_balances_available", + Help: "bbgo exchange available balances", + }, + []string{ + "exchange", // exchange name + "margin", // margin of connection. none, margin or isolated + "symbol", // margin symbol of the connection. + "currency", + }, + ) + + metricsTotalBalances = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "bbgo_balances_total", + Help: "bbgo exchange session total balances", + }, + []string{ + "exchange", // exchange name + "margin", // margin of connection. none, margin or isolated + "symbol", // margin symbol of the connection. + "currency", + }, + ) + + metricsTradesTotal = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "bbgo_trades_total", + Help: "bbgo exchange session trades", + }, + []string{ + "exchange", // exchange name + "margin", // margin of connection. none, margin or isolated + "symbol", // margin symbol of the connection. 
+ "side", // side: buy or sell + "liquidity", // maker or taker + }, + ) + + metricsTradingVolume = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "bbgo_trading_volume", + Help: "bbgo trading volume", + }, + []string{ + "exchange", // exchange name + "margin", // margin of connection. none, margin or isolated + "symbol", // margin symbol of the connection. + "side", // side: buy or sell + "liquidity", // maker or taker + }, + ) + + metricsLastUpdateTimeBalance = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "bbgo_last_update_time", + Help: "bbgo last update time of different channel", + }, + []string{ + "exchange", // exchange name + "margin", // margin of connection. none, margin or isolated + "channel", // channel: user, market + "data_type", // type: balance, ticker, kline, orderbook, trade, order + "symbol", // for market data, trade and order + "currency", // for balance + }, + ) +) + +func init() { + prometheus.MustRegister( + metricsConnectionStatus, + metricsTotalBalances, + metricsLockedBalances, + metricsAvailableBalances, + metricsTradesTotal, + metricsTradingVolume, + metricsLastUpdateTimeBalance, + ) +} diff --git a/pkg/bbgo/moving_average_settings.go b/pkg/bbgo/moving_average_settings.go new file mode 100644 index 0000000000..24118f169c --- /dev/null +++ b/pkg/bbgo/moving_average_settings.go @@ -0,0 +1,46 @@ +package bbgo + +import ( + "fmt" + + "github.com/c9s/bbgo/pkg/types" +) + +type MovingAverageSettings struct { + Type string `json:"type"` + Interval types.Interval `json:"interval"` + Window int `json:"window"` + + Side *types.SideType `json:"side"` + + QuantityOrAmount +} + +func (settings MovingAverageSettings) IntervalWindow() types.IntervalWindow { + var window = 99 + if settings.Window > 0 { + window = settings.Window + } + + return types.IntervalWindow{ + Interval: settings.Interval, + Window: window, + } +} + +func (settings *MovingAverageSettings) Indicator(indicatorSet *StandardIndicatorSet) (inc types.Float64Indicator, err error) { + var iw = settings.IntervalWindow() + + switch settings.Type { + case "SMA": + inc = indicatorSet.SMA(iw) + + case "EWMA", "EMA": + inc = indicatorSet.EWMA(iw) + + default: + return nil, fmt.Errorf("unsupported moving average type: %s", settings.Type) + } + + return inc, nil +} diff --git a/pkg/bbgo/notification.go b/pkg/bbgo/notification.go new file mode 100644 index 0000000000..db63d74480 --- /dev/null +++ b/pkg/bbgo/notification.go @@ -0,0 +1,85 @@ +package bbgo + +import ( + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/util" +) + +var Notification = &Notifiability{ + SymbolChannelRouter: NewPatternChannelRouter(nil), + SessionChannelRouter: NewPatternChannelRouter(nil), + ObjectChannelRouter: NewObjectChannelRouter(), +} + +func Notify(obj interface{}, args ...interface{}) { + Notification.Notify(obj, args...) +} + +func NotifyTo(channel string, obj interface{}, args ...interface{}) { + Notification.NotifyTo(channel, obj, args...) 
+} + +type Notifier interface { + NotifyTo(channel string, obj interface{}, args ...interface{}) + Notify(obj interface{}, args ...interface{}) +} + +type NullNotifier struct{} + +func (n *NullNotifier) NotifyTo(channel string, obj interface{}, args ...interface{}) {} + +func (n *NullNotifier) Notify(obj interface{}, args ...interface{}) {} + +type Notifiability struct { + notifiers []Notifier + SessionChannelRouter *PatternChannelRouter `json:"-"` + SymbolChannelRouter *PatternChannelRouter `json:"-"` + ObjectChannelRouter *ObjectChannelRouter `json:"-"` +} + +// RouteSymbol routes symbol name to channel +func (m *Notifiability) RouteSymbol(symbol string) (channel string, ok bool) { + if m.SymbolChannelRouter != nil { + return m.SymbolChannelRouter.Route(symbol) + } + return "", false +} + +// RouteSession routes Session name to channel +func (m *Notifiability) RouteSession(session string) (channel string, ok bool) { + if m.SessionChannelRouter != nil { + return m.SessionChannelRouter.Route(session) + } + return "", false +} + +// RouteObject routes object to channel +func (m *Notifiability) RouteObject(obj interface{}) (channel string, ok bool) { + if m.ObjectChannelRouter != nil { + return m.ObjectChannelRouter.Route(obj) + } + return "", false +} + +// AddNotifier adds the notifier that implements the Notifier interface. +func (m *Notifiability) AddNotifier(notifier Notifier) { + m.notifiers = append(m.notifiers, notifier) +} + +func (m *Notifiability) Notify(obj interface{}, args ...interface{}) { + if str, ok := obj.(string); ok { + simpleArgs := util.FilterSimpleArgs(args) + logrus.Infof(str, simpleArgs...) + } + + for _, n := range m.notifiers { + n.Notify(obj, args...) + } +} + +func (m *Notifiability) NotifyTo(channel string, obj interface{}, args ...interface{}) { + for _, n := range m.notifiers { + n.NotifyTo(channel, obj, args...) + } +} diff --git a/pkg/bbgo/notifier.go b/pkg/bbgo/notifier.go deleted file mode 100644 index 0f6470a42e..0000000000 --- a/pkg/bbgo/notifier.go +++ /dev/null @@ -1,60 +0,0 @@ -package bbgo - -type Notifier interface { - NotifyTo(channel, format string, args ...interface{}) - Notify(format string, args ...interface{}) -} - -type NullNotifier struct{} - -func (n *NullNotifier) NotifyTo(channel, format string, args ...interface{}) {} - -func (n *NullNotifier) Notify(format string, args ...interface{}) {} - -type Notifiability struct { - notifiers []Notifier - SessionChannelRouter *PatternChannelRouter - SymbolChannelRouter *PatternChannelRouter - ObjectChannelRouter *ObjectChannelRouter -} - -// RouteSession routes symbol name to channel -func (m *Notifiability) RouteSymbol(symbol string) (channel string, ok bool) { - if m.SymbolChannelRouter != nil { - return m.SymbolChannelRouter.Route(symbol) - } - return "", false -} - -// RouteSession routes Session name to channel -func (m *Notifiability) RouteSession(session string) (channel string, ok bool) { - if m.SessionChannelRouter != nil { - return m.SessionChannelRouter.Route(session) - } - return "", false -} - -// RouteObject routes object to channel -func (m *Notifiability) RouteObject(obj interface{}) (channel string, ok bool) { - if m.ObjectChannelRouter != nil { - return m.ObjectChannelRouter.Route(obj) - } - return "", false -} - -// AddNotifier adds the notifier that implements the Notifier interface. 
-func (m *Notifiability) AddNotifier(notifier Notifier) { - m.notifiers = append(m.notifiers, notifier) -} - -func (m *Notifiability) Notify(format string, args ...interface{}) { - for _, n := range m.notifiers { - n.Notify(format, args...) - } -} - -func (m *Notifiability) NotifyTo(channel, format string, args ...interface{}) { - for _, n := range m.notifiers { - n.NotifyTo(channel, format, args...) - } -} diff --git a/pkg/bbgo/order_execution.go b/pkg/bbgo/order_execution.go index 127f1d8c9a..9423293416 100644 --- a/pkg/bbgo/order_execution.go +++ b/pkg/bbgo/order_execution.go @@ -3,10 +3,9 @@ package bbgo import ( "context" "fmt" - "math" "github.com/pkg/errors" - "github.com/sirupsen/logrus" + log "github.com/sirupsen/logrus" "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" @@ -14,29 +13,36 @@ import ( type OrderExecutor interface { SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (createdOrders types.OrderSlice, err error) + CancelOrders(ctx context.Context, orders ...types.Order) error OnTradeUpdate(cb func(trade types.Trade)) OnOrderUpdate(cb func(order types.Order)) + EmitTradeUpdate(trade types.Trade) + EmitOrderUpdate(order types.Order) } type OrderExecutionRouter interface { - // SubmitOrderTo submit order to a specific exchange Session + // SubmitOrdersTo submit order to a specific exchange Session SubmitOrdersTo(ctx context.Context, session string, orders ...types.SubmitOrder) (createdOrders types.OrderSlice, err error) + CancelOrdersTo(ctx context.Context, session string, orders ...types.Order) error } type ExchangeOrderExecutionRouter struct { - Notifiability - - sessions map[string]*ExchangeSession + sessions map[string]*ExchangeSession + executors map[string]OrderExecutor } func (e *ExchangeOrderExecutionRouter) SubmitOrdersTo(ctx context.Context, session string, orders ...types.SubmitOrder) (types.OrderSlice, error) { + if executor, ok := e.executors[session]; ok { + return executor.SubmitOrders(ctx, orders...) + } + es, ok := e.sessions[session] if !ok { - return nil, fmt.Errorf("exchange Session %s not found", session) + return nil, fmt.Errorf("exchange session %s not found", session) } - formattedOrders, err := formatOrders(es, orders) + formattedOrders, err := es.FormatOrders(orders) if err != nil { return nil, err } @@ -44,12 +50,26 @@ func (e *ExchangeOrderExecutionRouter) SubmitOrdersTo(ctx context.Context, sessi return es.Exchange.SubmitOrders(ctx, formattedOrders...) } +func (e *ExchangeOrderExecutionRouter) CancelOrdersTo(ctx context.Context, session string, orders ...types.Order) error { + if executor, ok := e.executors[session]; ok { + return executor.CancelOrders(ctx, orders...) + } + es, ok := e.sessions[session] + if !ok { + return fmt.Errorf("exchange session %s not found", session) + } + + return es.Exchange.CancelOrders(ctx, orders...) +} + // ExchangeOrderExecutor is an order executor wrapper for single exchange instance. //go:generate callbackgen -type ExchangeOrderExecutor type ExchangeOrderExecutor struct { - Notifiability `json:"-"` + // MinQuoteBalance fixedpoint.Value `json:"minQuoteBalance,omitempty" yaml:"minQuoteBalance,omitempty"` + + Notifiability `json:"-" yaml:"-"` - Session *ExchangeSession + Session *ExchangeSession `json:"-" yaml:"-"` // private trade update callbacks tradeUpdateCallbacks []func(trade types.Trade) @@ -63,15 +83,15 @@ func (e *ExchangeOrderExecutor) notifySubmitOrders(orders ...types.SubmitOrder) // pass submit order as an interface object. 
channel, ok := e.RouteObject(&order) if ok { - e.NotifyTo(channel, ":memo: Submitting %s %s %s order with quantity: %s at price: %s", order.Symbol, order.Type, order.Side, order.QuantityString, order.PriceString, &order) + NotifyTo(channel, ":memo: Submitting %s %s %s order with quantity: %f @ %f, order: %v", order.Symbol, order.Type, order.Side, order.Quantity.Float64(), order.Price.Float64(), &order) } else { - e.Notify(":memo: Submitting %s %s %s order with quantity: %s at price: %s", order.Symbol, order.Type, order.Side, order.QuantityString, order.PriceString, &order) + Notify(":memo: Submitting %s %s %s order with quantity: %f @ %f, order: %v", order.Symbol, order.Type, order.Side, order.Quantity.Float64(), order.Price.Float64(), &order) } } } func (e *ExchangeOrderExecutor) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (types.OrderSlice, error) { - formattedOrders, err := formatOrders(e.Session, orders) + formattedOrders, err := e.Session.FormatOrders(orders) if err != nil { return nil, err } @@ -80,12 +100,12 @@ func (e *ExchangeOrderExecutor) SubmitOrders(ctx context.Context, orders ...type // pass submit order as an interface object. channel, ok := e.RouteObject(&order) if ok { - e.NotifyTo(channel, ":memo: Submitting %s %s %s order with quantity: %s", order.Symbol, order.Type, order.Side, order.QuantityString, order) + NotifyTo(channel, ":memo: Submitting %s %s %s order with quantity: %f, order: %v", order.Symbol, order.Type, order.Side, order.Quantity.Float64(), &order) } else { - e.Notify(":memo: Submitting %s %s %s order with quantity: %s", order.Symbol, order.Type, order.Side, order.QuantityString, order) + Notify(":memo: Submitting %s %s %s order with quantity: %f: %v", order.Symbol, order.Type, order.Side, order.Quantity.Float64(), &order) } - logrus.Infof("submitting order: %s", order.String()) + log.Infof("submitting order: %s", order.String()) } e.notifySubmitOrders(formattedOrders...) @@ -93,13 +113,20 @@ func (e *ExchangeOrderExecutor) SubmitOrders(ctx context.Context, orders ...type return e.Session.Exchange.SubmitOrders(ctx, formattedOrders...) } +func (e *ExchangeOrderExecutor) CancelOrders(ctx context.Context, orders ...types.Order) error { + for _, order := range orders { + log.Infof("cancelling order: %s", order) + } + return e.Session.Exchange.CancelOrders(ctx, orders...) +} + type BasicRiskController struct { - Logger *logrus.Logger + Logger *log.Logger - MaxOrderAmount fixedpoint.Value `json:"maxOrderAmount,omitempty"` - MinQuoteBalance fixedpoint.Value `json:"minQuoteBalance,omitempty"` - MaxBaseAssetBalance fixedpoint.Value `json:"maxBaseAssetBalance,omitempty"` - MinBaseAssetBalance fixedpoint.Value `json:"minBaseAssetBalance,omitempty"` + MaxOrderAmount fixedpoint.Value `json:"maxOrderAmount,omitempty" yaml:"maxOrderAmount,omitempty"` + MinQuoteBalance fixedpoint.Value `json:"minQuoteBalance,omitempty" yaml:"minQuoteBalance,omitempty"` + MaxBaseAssetBalance fixedpoint.Value `json:"maxBaseAssetBalance,omitempty" yaml:"maxBaseAssetBalance,omitempty"` + MinBaseAssetBalance fixedpoint.Value `json:"minBaseAssetBalance,omitempty" yaml:"minBaseAssetBalance,omitempty"` } // ProcessOrders filters and modifies the submit order objects by: @@ -107,14 +134,16 @@ type BasicRiskController struct { // 2. Decrease the quantity by risk controls // 3. If the quantity does not meet minimal requirement, we should ignore the submit order. 
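// For example (illustrative numbers): a buy of quantity 0.1 at price 10 with market.MinAmount = 10 is bumped to just over 1.0 (a 1% buffer is applied) before the remaining balance checks run.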
func (c *BasicRiskController) ProcessOrders(session *ExchangeSession, orders ...types.SubmitOrder) (outOrders []types.SubmitOrder, errs []error) { - balances := session.Account.Balances() + balances := session.GetAccount().Balances() addError := func(err error) { errs = append(errs, err) } - accumulativeQuoteAmount := 0.0 - accumulativeBaseSellQuantity := 0.0 + accumulativeQuoteAmount := fixedpoint.Zero + accumulativeBaseSellQuantity := fixedpoint.Zero + increaseFactor := fixedpoint.NewFromFloat(1.01) + for _, order := range orders { lastPrice, ok := session.LastPrice(order.Symbol) if !ok { @@ -137,6 +166,7 @@ func (c *BasicRiskController) ProcessOrders(session *ExchangeSession, orders ... switch order.Side { case types.SideTypeBuy: + minAmount := market.MinAmount.Mul(increaseFactor) // Critical conditions for placing buy orders quoteBalance, ok := balances[market.QuoteCurrency] if !ok { @@ -144,67 +174,70 @@ func (c *BasicRiskController) ProcessOrders(session *ExchangeSession, orders ... continue } - if quoteBalance.Available < c.MinQuoteBalance { + if quoteBalance.Available.Compare(c.MinQuoteBalance) < 0 { addError(errors.Wrapf(ErrQuoteBalanceLevelTooLow, "can not place buy order, quote balance level is too low: %s < %s, order: %s", - types.USD.FormatMoneyFloat64(quoteBalance.Available.Float64()), - types.USD.FormatMoneyFloat64(c.MinQuoteBalance.Float64()), order.String())) + types.USD.FormatMoney(quoteBalance.Available), + types.USD.FormatMoney(c.MinQuoteBalance), order.String())) continue } // Increase the quantity if the amount is not enough, // this is the only increase op, later we will decrease the quantity if it meets the criteria - quantity = adjustQuantityByMinAmount(quantity, price, market.MinAmount*1.01) + quantity = AdjustFloatQuantityByMinAmount(quantity, price, minAmount) - if c.MaxOrderAmount > 0 { - quantity = adjustQuantityByMaxAmount(quantity, price, c.MaxOrderAmount.Float64()) + if c.MaxOrderAmount.Sign() > 0 { + quantity = AdjustFloatQuantityByMaxAmount(quantity, price, c.MaxOrderAmount) } - quoteAssetQuota := math.Max(0.0, quoteBalance.Available.Float64()-c.MinQuoteBalance.Float64()) - if quoteAssetQuota < market.MinAmount { + quoteAssetQuota := fixedpoint.Max( + fixedpoint.Zero, quoteBalance.Available.Sub(c.MinQuoteBalance)) + if quoteAssetQuota.Compare(market.MinAmount) < 0 { addError( errors.Wrapf( ErrInsufficientQuoteBalance, - "can not place buy order, insufficient quote balance: quota %f < min amount %f, order: %s", - quoteAssetQuota, market.MinAmount, order.String())) + "can not place buy order, insufficient quote balance: quota %s < min amount %s, order: %s", + quoteAssetQuota.String(), market.MinAmount.String(), order.String())) continue } - quantity = adjustQuantityByMaxAmount(quantity, price, quoteAssetQuota) + quantity = AdjustFloatQuantityByMaxAmount(quantity, price, quoteAssetQuota) // if MaxBaseAssetBalance is enabled, we should check the current base asset balance - if baseBalance, hasBaseAsset := balances[market.BaseCurrency]; hasBaseAsset && c.MaxBaseAssetBalance > 0 { - if baseBalance.Available > c.MaxBaseAssetBalance { + if baseBalance, hasBaseAsset := balances[market.BaseCurrency]; hasBaseAsset && c.MaxBaseAssetBalance.Sign() > 0 { + if baseBalance.Available.Compare(c.MaxBaseAssetBalance) > 0 { addError( errors.Wrapf( ErrAssetBalanceLevelTooHigh, - "should not place buy order, asset balance level is too high: %f > %f, order: %s", - baseBalance.Available.Float64(), - c.MaxBaseAssetBalance.Float64(), + "should not place buy order, asset balance 
level is too high: %s > %s, order: %s", + baseBalance.Available.String(), + c.MaxBaseAssetBalance.String(), order.String())) continue } - baseAssetQuota := math.Max(0.0, c.MaxBaseAssetBalance.Float64()-baseBalance.Available.Float64()) - if quantity > baseAssetQuota { + baseAssetQuota := fixedpoint.Max(fixedpoint.Zero, c.MaxBaseAssetBalance.Sub(baseBalance.Available)) + if quantity.Compare(baseAssetQuota) > 0 { quantity = baseAssetQuota } } // if the amount is still too small, we should skip it. - notional := quantity * lastPrice - if notional < market.MinAmount { + notional := quantity.Mul(lastPrice) + if notional.Compare(market.MinAmount) < 0 { addError( fmt.Errorf( - "can not place buy order, quote amount too small: notional %f < min amount %f, order: %s", - notional, - market.MinAmount, + "can not place buy order, quote amount too small: notional %s < min amount %s, order: %s", + notional.String(), + market.MinAmount.String(), order.String())) continue } - accumulativeQuoteAmount += notional + accumulativeQuoteAmount = accumulativeQuoteAmount.Add(notional) case types.SideTypeSell: + minNotion := market.MinNotional.Mul(increaseFactor) + // Critical conditions for placing SELL orders baseAssetBalance, ok := balances[market.BaseCurrency] if !ok { @@ -217,58 +250,58 @@ func (c *BasicRiskController) ProcessOrders(session *ExchangeSession, orders ... } // if the amount is too small, we should increase it. - quantity = adjustQuantityByMinAmount(quantity, price, market.MinNotional*1.01) + quantity = AdjustFloatQuantityByMinAmount(quantity, price, minNotion) // we should not SELL too much - quantity = math.Min(quantity, baseAssetBalance.Available.Float64()) + quantity = fixedpoint.Min(quantity, baseAssetBalance.Available) - if c.MinBaseAssetBalance > 0 { - if baseAssetBalance.Available < c.MinBaseAssetBalance { + if c.MinBaseAssetBalance.Sign() > 0 { + if baseAssetBalance.Available.Compare(c.MinBaseAssetBalance) < 0 { addError( errors.Wrapf( ErrAssetBalanceLevelTooLow, - "asset balance level is too low: %f > %f", baseAssetBalance.Available.Float64(), c.MinBaseAssetBalance.Float64())) + "asset balance level is too low: %s > %s", baseAssetBalance.Available.String(), c.MinBaseAssetBalance.String())) continue } - quantity = math.Min(quantity, baseAssetBalance.Available.Float64()-c.MinBaseAssetBalance.Float64()) - if quantity < market.MinQuantity { + quantity = fixedpoint.Min(quantity, baseAssetBalance.Available.Sub(c.MinBaseAssetBalance)) + if quantity.Compare(market.MinQuantity) < 0 { addError( errors.Wrapf( ErrInsufficientAssetBalance, - "insufficient asset balance: %f > minimal quantity %f", - baseAssetBalance.Available.Float64(), - market.MinQuantity)) + "insufficient asset balance: %s > minimal quantity %s", + baseAssetBalance.Available.String(), + market.MinQuantity.String())) continue } } - if c.MaxOrderAmount > 0 { - quantity = adjustQuantityByMaxAmount(quantity, price, c.MaxOrderAmount.Float64()) + if c.MaxOrderAmount.Sign() > 0 { + quantity = AdjustFloatQuantityByMaxAmount(quantity, price, c.MaxOrderAmount) } - notional := quantity * lastPrice - if notional < market.MinNotional { + notional := quantity.Mul(lastPrice) + if notional.Compare(market.MinNotional) < 0 { addError( fmt.Errorf( - "can not place sell order, notional %f < min notional: %f, order: %s", - notional, - market.MinNotional, + "can not place sell order, notional %s < min notional: %s, order: %s", + notional.String(), + market.MinNotional.String(), order.String())) continue } - if quantity < market.MinLot { + if 
quantity.Compare(market.MinQuantity) < 0 { addError( fmt.Errorf( - "can not place sell order, quantity %f is less than the minimal lot %f, order: %s", - quantity, - market.MinLot, + "can not place sell order, quantity %s is less than the minimal lot %s, order: %s", + quantity.String(), + market.MinQuantity.String(), order.String())) continue } - accumulativeBaseSellQuantity += quantity + accumulativeBaseSellQuantity = accumulativeBaseSellQuantity.Add(quantity) } // update quantity and format the order @@ -279,18 +312,6 @@ func (c *BasicRiskController) ProcessOrders(session *ExchangeSession, orders ... return outOrders, nil } -func formatOrders(session *ExchangeSession, orders []types.SubmitOrder) (formattedOrders []types.SubmitOrder, err error) { - for _, order := range orders { - o, err := session.FormatOrder(order) - if err != nil { - return formattedOrders, err - } - formattedOrders = append(formattedOrders, o) - } - - return formattedOrders, err -} - func max(a, b int64) int64 { if a > b { return a diff --git a/pkg/bbgo/order_executor_general.go b/pkg/bbgo/order_executor_general.go new file mode 100644 index 0000000000..751cddacb7 --- /dev/null +++ b/pkg/bbgo/order_executor_general.go @@ -0,0 +1,127 @@ +package bbgo + +import ( + "context" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type NotifyFunc func(obj interface{}, args ...interface{}) + +// GeneralOrderExecutor implements the general order executor for strategy +type GeneralOrderExecutor struct { + session *ExchangeSession + symbol string + strategy string + strategyInstanceID string + position *types.Position + activeMakerOrders *ActiveOrderBook + orderStore *OrderStore + tradeCollector *TradeCollector +} + +func NewGeneralOrderExecutor(session *ExchangeSession, symbol, strategy, strategyInstanceID string, position *types.Position) *GeneralOrderExecutor { + // Always update the position fields + position.Strategy = strategy + position.StrategyInstanceID = strategyInstanceID + + orderStore := NewOrderStore(symbol) + return &GeneralOrderExecutor{ + session: session, + symbol: symbol, + strategy: strategy, + strategyInstanceID: strategyInstanceID, + position: position, + activeMakerOrders: NewActiveOrderBook(symbol), + orderStore: orderStore, + tradeCollector: NewTradeCollector(symbol, position, orderStore), + } +} + +func (e *GeneralOrderExecutor) BindEnvironment(environ *Environment) { + e.tradeCollector.OnProfit(func(trade types.Trade, profit *types.Profit) { + environ.RecordPosition(e.position, trade, profit) + }) +} + +func (e *GeneralOrderExecutor) BindTradeStats(tradeStats *types.TradeStats) { + e.tradeCollector.OnProfit(func(trade types.Trade, profit *types.Profit) { + if profit == nil { + return + } + tradeStats.Add(profit.Profit) + }) +} + +func (e *GeneralOrderExecutor) BindProfitStats(profitStats *types.ProfitStats) { + e.tradeCollector.OnProfit(func(trade types.Trade, profit *types.Profit) { + profitStats.AddTrade(trade) + if profit == nil { + return + } + + profitStats.AddProfit(*profit) + Notify(&profitStats) + }) +} + +func (e *GeneralOrderExecutor) Bind() { + e.activeMakerOrders.BindStream(e.session.UserDataStream) + e.orderStore.BindStream(e.session.UserDataStream) + + // trade notify + e.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + Notify(trade) + }) + + e.tradeCollector.OnPositionUpdate(func(position *types.Position) { + log.Infof("position changed: %s", position) + Notify(position) + }) + + 
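+	// bind the trade collector to the user data stream; trade updates received from the stream drive the OnTrade / OnPositionUpdate callbacks registered above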
e.tradeCollector.BindStream(e.session.UserDataStream)
+}
+
+func (e *GeneralOrderExecutor) SubmitOrders(ctx context.Context, submitOrders ...types.SubmitOrder) (types.OrderSlice, error) {
+	formattedOrders, err := e.session.FormatOrders(submitOrders)
+	if err != nil {
+		return nil, err
+	}
+
+	createdOrders, err := e.session.Exchange.SubmitOrders(ctx, formattedOrders...)
+	if err != nil {
+		log.WithError(err).Errorf("can not place orders")
+	}
+
+	e.orderStore.Add(createdOrders...)
+	e.activeMakerOrders.Add(createdOrders...)
+	e.tradeCollector.Process()
+	return createdOrders, err
+}
+
+func (e *GeneralOrderExecutor) GracefulCancel(ctx context.Context) error {
+	if err := e.activeMakerOrders.GracefulCancel(ctx, e.session.Exchange); err != nil {
+		log.WithError(err).Errorf("graceful cancel order error")
+		return err
+	}
+
+	e.tradeCollector.Process()
+	return nil
+}
+
+func (e *GeneralOrderExecutor) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error {
+	submitOrder := e.position.NewMarketCloseOrder(percentage)
+	if submitOrder == nil {
+		return nil
+	}
+
+	_, err := e.SubmitOrders(ctx, *submitOrder)
+	return err
+}
+
+func (e *GeneralOrderExecutor) TradeCollector() *TradeCollector {
+	return e.tradeCollector
+}
diff --git a/pkg/bbgo/order_processor.go b/pkg/bbgo/order_processor.go
index 07fd9c773b..f61f134617 100644
--- a/pkg/bbgo/order_processor.go
+++ b/pkg/bbgo/order_processor.go
@@ -1,6 +1,7 @@
 package bbgo
 
 import (
+	"github.com/c9s/bbgo/pkg/fixedpoint"
 	"github.com/pkg/errors"
 )
 
@@ -13,23 +14,47 @@ var (
 	ErrAssetBalanceLevelTooHigh = errors.New("asset balance level too high")
 )
 
-// adjustQuantityByMinAmount adjusts the quantity to make the amount greater than the given minAmount
-func adjustQuantityByMinAmount(quantity, currentPrice, minAmount float64) float64 {
+// AdjustQuantityByMaxAmount adjusts the quantity so that the amount (price * quantity) does not exceed the given maxAmount
+func AdjustQuantityByMaxAmount(quantity, currentPrice, maxAmount fixedpoint.Value) fixedpoint.Value {
 	// modify quantity for the min amount
-	amount := currentPrice * quantity
-	if amount < minAmount {
-		ratio := minAmount / amount
-		quantity *= ratio
+	amount := currentPrice.Mul(quantity)
+	if amount.Compare(maxAmount) < 0 {
+		return quantity
+	}
+
+	ratio := maxAmount.Div(amount)
+	return quantity.Mul(ratio)
+}
+
+// AdjustQuantityByMinAmount adjusts the quantity to make the amount greater than the given minAmount
+func AdjustQuantityByMinAmount(quantity, currentPrice, minAmount fixedpoint.Value) fixedpoint.Value {
+	// modify quantity for the min amount
+	amount := currentPrice.Mul(quantity)
+	if amount.Compare(minAmount) < 0 {
+		ratio := minAmount.Div(amount)
+		quantity = quantity.Mul(ratio)
+	}
+
+	return quantity
+}
+
+// AdjustFloatQuantityByMinAmount adjusts the quantity to make the amount greater than the given minAmount
+func AdjustFloatQuantityByMinAmount(quantity, currentPrice, minAmount fixedpoint.Value) fixedpoint.Value {
+	// modify quantity for the min amount
+	amount := currentPrice.Mul(quantity)
+	if amount.Compare(minAmount) < 0 {
+		ratio := minAmount.Div(amount)
+		return quantity.Mul(ratio)
 	}
 
 	return quantity
 }
 
-func adjustQuantityByMaxAmount(quantity float64, price float64, maxAmount float64) float64 {
-	amount := price * quantity
-	if amount > maxAmount {
-		ratio := maxAmount / amount
-		quantity *= ratio
+func AdjustFloatQuantityByMaxAmount(quantity fixedpoint.Value, price fixedpoint.Value, maxAmount fixedpoint.Value) fixedpoint.Value {
+	amount := price.Mul(quantity)
+	if 
amount.Compare(maxAmount) > 0 { + ratio := maxAmount.Div(amount) + return quantity.Mul(ratio) } return quantity diff --git a/pkg/bbgo/order_processor_test.go b/pkg/bbgo/order_processor_test.go index beb19d9948..9976548689 100644 --- a/pkg/bbgo/order_processor_test.go +++ b/pkg/bbgo/order_processor_test.go @@ -3,41 +3,54 @@ package bbgo import ( "testing" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/stretchr/testify/assert" ) func TestAdjustQuantityByMinAmount(t *testing.T) { type args struct { - quantity, price, minAmount float64 + quantity, price, minAmount fixedpoint.Value } type testcase struct { name string args args - wanted float64 + wanted string } tests := []testcase{ { - name: "amount too small", - args: args{0.1, 10.0, 10.0}, - wanted: 1.0, + name: "amount too small", + args: args{ + fixedpoint.MustNewFromString("0.1"), + fixedpoint.MustNewFromString("10.0"), + fixedpoint.MustNewFromString("10.0"), + }, + wanted: "1.0", }, { - name: "amount equals to min amount", - args: args{1.0, 10.0, 10.0}, - wanted: 1.0, + name: "amount equals to min amount", + args: args{ + fixedpoint.MustNewFromString("1.0"), + fixedpoint.MustNewFromString("10.0"), + fixedpoint.MustNewFromString("10.0"), + }, + wanted: "1.0", }, { - name: "amount is greater than min amount", - args: args{2.0, 10.0, 10.0}, - wanted: 2.0, + name: "amount is greater than min amount", + args: args{ + fixedpoint.MustNewFromString("2.0"), + fixedpoint.MustNewFromString("10.0"), + fixedpoint.MustNewFromString("10.0"), + }, + wanted: "2.0", }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - q := adjustQuantityByMinAmount(test.args.quantity, test.args.price, test.args.minAmount) - assert.Equal(t, test.wanted, q) + q := AdjustFloatQuantityByMinAmount(test.args.quantity, test.args.price, test.args.minAmount) + assert.Equal(t, fixedpoint.MustNewFromString(test.wanted), q) }) } } diff --git a/pkg/bbgo/order_store.go b/pkg/bbgo/order_store.go index b9e81bed95..46e4911c7f 100644 --- a/pkg/bbgo/order_store.go +++ b/pkg/bbgo/order_store.go @@ -13,6 +13,8 @@ type OrderStore struct { Symbol string RemoveCancelled bool + RemoveFilled bool + AddOrderUpdate bool } func NewOrderStore(symbol string) *OrderStore { @@ -22,6 +24,48 @@ func NewOrderStore(symbol string) *OrderStore { } } +func (s *OrderStore) AllFilled() bool { + s.mu.Lock() + defer s.mu.Unlock() + + // If any order is new or partially filled, we return false + for _, o := range s.orders { + switch o.Status { + + case types.OrderStatusCanceled, types.OrderStatusRejected: + continue + + case types.OrderStatusNew, types.OrderStatusPartiallyFilled: + return false + + case types.OrderStatusFilled: + // do nothing for the filled order + + } + } + + // If we pass through the for loop, then all the orders filled + return true +} + +func (s *OrderStore) NumOfOrders() (num int) { + s.mu.Lock() + num = len(s.orders) + s.mu.Unlock() + return num +} + +func (s *OrderStore) Orders() (orders []types.Order) { + s.mu.Lock() + defer s.mu.Unlock() + + for _, o := range s.orders { + orders = append(orders, o) + } + + return orders +} + func (s *OrderStore) Exists(oID uint64) (ok bool) { s.mu.Lock() defer s.mu.Unlock() @@ -30,6 +74,16 @@ func (s *OrderStore) Exists(oID uint64) (ok bool) { return ok } +// Get a single order from the order store by order ID +// Should check ok to make sure the order is returned successfully +func (s *OrderStore) Get(oID uint64) (order types.Order, ok bool) { + s.mu.Lock() + defer s.mu.Unlock() + + order, ok = s.orders[oID] + return order, ok +} 
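+
+// Example (illustrative sketch only; "store" is assumed to be an *OrderStore and
+// "orderID" the ID of an order previously added to it): the accessors above let a
+// caller inspect tracked orders without touching the stream, e.g.
+//
+//	if o, ok := store.Get(orderID); ok {
+//		log.Infof("order %d status: %s", o.OrderID, o.Status)
+//	}
+//	if store.AllFilled() {
+//		// no tracked order is still new or partially filled
+//	}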
+ func (s *OrderStore) Add(orders ...types.Order) { s.mu.Lock() defer s.mu.Unlock() @@ -60,25 +114,34 @@ func (s *OrderStore) Update(o types.Order) bool { func (s *OrderStore) BindStream(stream types.Stream) { hasSymbol := s.Symbol != "" stream.OnOrderUpdate(func(order types.Order) { - if hasSymbol { - if order.Symbol != s.Symbol { - return - } - s.handleOrderUpdate(order) - } else { - s.handleOrderUpdate(order) + // if we have symbol defined, we should filter out the orders that we are not interested in + if hasSymbol && order.Symbol != s.Symbol { + return } + + s.handleOrderUpdate(order) }) } func (s *OrderStore) handleOrderUpdate(order types.Order) { switch order.Status { - case types.OrderStatusPartiallyFilled, types.OrderStatusNew, types.OrderStatusFilled: - s.Update(order) + + case types.OrderStatusNew, types.OrderStatusPartiallyFilled, types.OrderStatusFilled: + if s.AddOrderUpdate { + s.Add(order) + } else { + s.Update(order) + } + + if s.RemoveFilled && order.Status == types.OrderStatusFilled { + s.Remove(order) + } case types.OrderStatusCanceled: if s.RemoveCancelled { s.Remove(order) + } else if order.ExecutedQuantity.IsZero() { + s.Remove(order) } case types.OrderStatusRejected: diff --git a/pkg/bbgo/persistence.go b/pkg/bbgo/persistence.go index 3f6d5d0e9a..2b1be9f83d 100644 --- a/pkg/bbgo/persistence.go +++ b/pkg/bbgo/persistence.go @@ -1,6 +1,13 @@ package bbgo -import "fmt" +import ( + "fmt" + "reflect" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/service" +) type PersistenceSelector struct { // StoreID is the store you want to use. @@ -10,61 +17,124 @@ type PersistenceSelector struct { Type string `json:"type" yaml:"type"` } +var DefaultPersistenceServiceFacade = &service.PersistenceServiceFacade{ + Memory: service.NewMemoryService(), +} + +var PersistenceServiceFacade = DefaultPersistenceServiceFacade + // Persistence is used for strategy to inject the persistence. type Persistence struct { PersistenceSelector *PersistenceSelector `json:"persistence,omitempty" yaml:"persistence,omitempty"` - - Facade *PersistenceServiceFacade `json:"-" yaml:"-"` } -func (p *Persistence) backendService(t string) (service PersistenceService, err error) { +func (p *Persistence) backendService(t string) (service.PersistenceService, error) { switch t { case "json": - service = p.Facade.Json + return PersistenceServiceFacade.Json, nil case "redis": - service = p.Facade.Redis + if PersistenceServiceFacade.Redis == nil { + log.Warn("redis persistence is not available, fallback to memory backend") + return PersistenceServiceFacade.Memory, nil + } + return PersistenceServiceFacade.Redis, nil case "memory": - service = p.Facade.Memory + return PersistenceServiceFacade.Memory, nil - default: - err = fmt.Errorf("unsupported persistent type %s", t) } - return service, err + return nil, fmt.Errorf("unsupported persistent type %s", t) } func (p *Persistence) Load(val interface{}, subIDs ...string) error { - service, err := p.backendService(p.PersistenceSelector.Type) + ps, err := p.backendService(p.PersistenceSelector.Type) if err != nil { return err } + log.Debugf("using persistence store %T for loading", ps) + if p.PersistenceSelector.StoreID == "" { - return fmt.Errorf("persistence.store can not be empty") + p.PersistenceSelector.StoreID = "default" } - store := service.NewStore(p.PersistenceSelector.StoreID, subIDs...) + store := ps.NewStore(p.PersistenceSelector.StoreID, subIDs...) 
return store.Load(val) } func (p *Persistence) Save(val interface{}, subIDs ...string) error { - service, err := p.backendService(p.PersistenceSelector.Type) + ps, err := p.backendService(p.PersistenceSelector.Type) if err != nil { return err } + log.Debugf("using persistence store %T for storing", ps) + if p.PersistenceSelector.StoreID == "" { - return fmt.Errorf("persistence.store can not be empty") + p.PersistenceSelector.StoreID = "default" } - store := service.NewStore(p.PersistenceSelector.StoreID, subIDs...) + store := ps.NewStore(p.PersistenceSelector.StoreID, subIDs...) return store.Save(val) } -type PersistenceServiceFacade struct { - Redis *RedisPersistenceService - Json *JsonPersistenceService - Memory *MemoryService +func (p *Persistence) Sync(obj interface{}) error { + id := callID(obj) + if len(id) == 0 { + return nil + } + + ps := PersistenceServiceFacade.Get() + return storePersistenceFields(obj, id, ps) +} + +// Sync syncs the object properties into the persistence layer +func Sync(obj interface{}) error { + id := callID(obj) + if len(id) == 0 { + return nil + } + + ps := PersistenceServiceFacade.Get() + return storePersistenceFields(obj, id, ps) +} + +func loadPersistenceFields(obj interface{}, id string, persistence service.PersistenceService) error { + return iterateFieldsByTag(obj, "persistence", func(tag string, field reflect.StructField, value reflect.Value) error { + log.Debugf("[loadPersistenceFields] loading value into field %v, tag = %s, original value = %v", field, tag, value) + + newValueInf := newTypeValueInterface(value.Type()) + // inf := value.Interface() + store := persistence.NewStore("state", id, tag) + if err := store.Load(&newValueInf); err != nil { + if err == service.ErrPersistenceNotExists { + log.Debugf("[loadPersistenceFields] state key does not exist, id = %v, tag = %s", id, tag) + return nil + } + + return err + } + + newValue := reflect.ValueOf(newValueInf) + if value.Kind() != reflect.Ptr && newValue.Kind() == reflect.Ptr { + newValue = newValue.Elem() + } + + log.Debugf("[loadPersistenceFields] %v = %v -> %v\n", field, value, newValue) + + value.Set(newValue) + return nil + }) +} + +func storePersistenceFields(obj interface{}, id string, persistence service.PersistenceService) error { + return iterateFieldsByTag(obj, "persistence", func(tag string, ft reflect.StructField, fv reflect.Value) error { + log.Debugf("[storePersistenceFields] storing value from field %v, tag = %s, original value = %v", ft, tag, fv) + + inf := fv.Interface() + store := persistence.NewStore("state", id, tag) + return store.Save(inf) + }) } diff --git a/pkg/bbgo/persistence_test.go b/pkg/bbgo/persistence_test.go new file mode 100644 index 0000000000..c58cd9eda6 --- /dev/null +++ b/pkg/bbgo/persistence_test.go @@ -0,0 +1,136 @@ +package bbgo + +import ( + "os" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/types" +) + +type TestStruct struct { + *Environment + *Graceful + + Position *types.Position `persistence:"position"` + Integer int64 `persistence:"integer"` + Integer2 int64 `persistence:"integer2"` + Float int64 `persistence:"float"` + String string `persistence:"string"` +} + +func (t *TestStruct) InstanceID() string { + return "test-struct" +} + +func preparePersistentServices() []service.PersistenceService { + mem := service.NewMemoryService() + jsonDir := &service.JsonPersistenceService{Directory: "testoutput/persistence"} + pss := 
[]service.PersistenceService{ + mem, + jsonDir, + } + + if _, ok := os.LookupEnv("TEST_REDIS"); ok { + redisP := service.NewRedisPersistenceService(&service.RedisPersistenceConfig{ + Host: "localhost", + Port: "6379", + DB: 0, + }) + pss = append(pss, redisP) + } + + return pss +} + +func Test_callID(t *testing.T) { + id := callID(&TestStruct{}) + assert.NotEmpty(t, id) +} + +func Test_loadPersistenceFields(t *testing.T) { + var pss = preparePersistentServices() + + for _, ps := range pss { + psName := reflect.TypeOf(ps).Elem().String() + t.Run(psName+"/empty", func(t *testing.T) { + b := &TestStruct{} + err := loadPersistenceFields(b, "test-empty", ps) + assert.NoError(t, err) + }) + + t.Run(psName+"/nil", func(t *testing.T) { + var b *TestStruct = nil + err := loadPersistenceFields(b, "test-nil", ps) + assert.Equal(t, errCanNotIterateNilPointer, err) + }) + + t.Run(psName+"/pointer-field", func(t *testing.T) { + var a = &TestStruct{ + Position: types.NewPosition("BTCUSDT", "BTC", "USDT"), + } + a.Position.Base = fixedpoint.NewFromFloat(10.0) + a.Position.AverageCost = fixedpoint.NewFromFloat(3343.0) + err := storePersistenceFields(a, "pointer-field-test", ps) + assert.NoError(t, err) + + b := &TestStruct{} + err = loadPersistenceFields(b, "pointer-field-test", ps) + assert.NoError(t, err) + + assert.Equal(t, "10", a.Position.Base.String()) + assert.Equal(t, "3343", a.Position.AverageCost.String()) + }) + } +} + +func Test_storePersistenceFields(t *testing.T) { + var pss = preparePersistentServices() + + var a = &TestStruct{ + Integer: 1, + Integer2: 2, + Float: 3.0, + String: "foobar", + Position: types.NewPosition("BTCUSDT", "BTC", "USDT"), + } + + a.Position.Base = fixedpoint.NewFromFloat(10.0) + a.Position.AverageCost = fixedpoint.NewFromFloat(3343.0) + + for _, ps := range pss { + psName := reflect.TypeOf(ps).Elem().String() + t.Run("all/"+psName, func(t *testing.T) { + id := callID(a) + err := storePersistenceFields(a, id, ps) + assert.NoError(t, err) + + var i int64 + store := ps.NewStore("state", "test-struct", "integer") + err = store.Load(&i) + assert.NoError(t, err) + assert.Equal(t, int64(1), i) + + var p *types.Position + store = ps.NewStore("state", "test-struct", "position") + err = store.Load(&p) + assert.NoError(t, err) + assert.Equal(t, fixedpoint.NewFromFloat(10.0), p.Base) + assert.Equal(t, fixedpoint.NewFromFloat(3343.0), p.AverageCost) + + var b = &TestStruct{} + err = loadPersistenceFields(b, id, ps) + assert.NoError(t, err) + assert.Equal(t, a.Integer, b.Integer) + assert.Equal(t, a.Integer2, b.Integer2) + assert.Equal(t, a.Float, b.Float) + assert.Equal(t, a.String, b.String) + assert.Equal(t, a.Position, b.Position) + }) + } + +} diff --git a/pkg/bbgo/position.go b/pkg/bbgo/position.go deleted file mode 100644 index 7dfe9a10d5..0000000000 --- a/pkg/bbgo/position.go +++ /dev/null @@ -1,105 +0,0 @@ -package bbgo - -import ( - "github.com/c9s/bbgo/pkg/fixedpoint" - "github.com/c9s/bbgo/pkg/types" -) - -type Position struct { - Symbol string `json:"symbol"` - BaseCurrency string `json:"baseCurrency"` - QuoteCurrency string `json:"quoteCurrency"` - - Base fixedpoint.Value `json:"base"` - Quote fixedpoint.Value `json:"quote"` - AverageCost fixedpoint.Value `json:"averageCost"` -} - -func (p *Position) BindStream(stream types.Stream) { - stream.OnTradeUpdate(func(trade types.Trade) { - if p.Symbol == trade.Symbol { - p.AddTrade(trade) - } - }) -} - -func (p *Position) AddTrades(trades []types.Trade) (fixedpoint.Value, bool) { - var totalProfitAmount fixedpoint.Value - 
for _, trade := range trades { - if profitAmount, profit := p.AddTrade(trade); profit { - totalProfitAmount += profitAmount - } - } - - return totalProfitAmount, totalProfitAmount != 0 -} - -func (p *Position) AddTrade(t types.Trade) (fixedpoint.Value, bool) { - price := fixedpoint.NewFromFloat(t.Price) - quantity := fixedpoint.NewFromFloat(t.Quantity) - quoteQuantity := fixedpoint.NewFromFloat(t.QuoteQuantity) - fee := fixedpoint.NewFromFloat(t.Fee) - - switch t.FeeCurrency { - - case p.BaseCurrency: - quantity -= fee - - case p.QuoteCurrency: - quoteQuantity -= fee - - } - - // Base > 0 means we're in long position - // Base < 0 means we're in short position - switch t.Side { - - case types.SideTypeBuy: - if p.Base < 0 { - // handling short-to-long position - if p.Base+quantity > 0 { - closingProfit := (p.AverageCost - price).Mul(-p.Base) - p.Base += quantity - p.Quote -= quoteQuantity - p.AverageCost = price - return closingProfit, true - } else { - // covering short position - p.Base += quantity - p.Quote -= quoteQuantity - return (p.AverageCost - price).Mul(quantity), true - } - } - - p.AverageCost = (p.AverageCost.Mul(p.Base) + quoteQuantity).Div(p.Base + quantity) - p.Base += quantity - p.Quote -= quoteQuantity - - return 0, false - - case types.SideTypeSell: - if p.Base > 0 { - // long-to-short - if p.Base-quantity < 0 { - closingProfit := (price - p.AverageCost).Mul(p.Base) - p.Base -= quantity - p.Quote += quoteQuantity - p.AverageCost = price - return closingProfit, true - } else { - p.Base -= quantity - p.Quote += quoteQuantity - return (price - p.AverageCost).Mul(quantity), true - } - } - - // handling short position - p.AverageCost = (p.AverageCost.Mul(-p.Base) + quoteQuantity).Div(-p.Base + quantity) - p.Base -= quantity - p.Quote += quoteQuantity - - return 0, false - } - - return 0, false -} diff --git a/pkg/bbgo/position_test.go b/pkg/bbgo/position_test.go deleted file mode 100644 index dd38367015..0000000000 --- a/pkg/bbgo/position_test.go +++ /dev/null @@ -1,176 +0,0 @@ -package bbgo - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/c9s/bbgo/pkg/fixedpoint" - "github.com/c9s/bbgo/pkg/types" -) - -func TestPosition(t *testing.T) { - var feeRate = 0.05 * 0.01 - var testcases = []struct { - name string - trades []types.Trade - expectedAverageCost fixedpoint.Value - expectedBase fixedpoint.Value - expectedQuote fixedpoint.Value - expectedProfit fixedpoint.Value - }{ - { - name: "base fee", - trades: []types.Trade{ - { - Side: types.SideTypeBuy, - Price: 1000.0, - Quantity: 0.01, - QuoteQuantity: 1000.0 * 0.01, - Fee: 0.01 * 0.05 * 0.01, // 0.05% - FeeCurrency: "BTC", - }, - }, - expectedAverageCost: fixedpoint.NewFromFloat((1000.0 * 0.01) / (0.01 * (1.0 - feeRate))), - expectedBase: fixedpoint.NewFromFloat(0.01 - (0.01 * feeRate)), - expectedQuote: fixedpoint.NewFromFloat(0 - 1000.0*0.01), - expectedProfit: fixedpoint.NewFromFloat(0.0), - }, - { - name: "quote fee", - trades: []types.Trade{ - { - Side: types.SideTypeSell, - Price: 1000.0, - Quantity: 0.01, - QuoteQuantity: 1000.0 * 0.01, - Fee: (1000.0 * 0.01) * feeRate, // 0.05% - FeeCurrency: "USDT", - }, - }, - expectedAverageCost: fixedpoint.NewFromFloat((1000.0 * 0.01 * (1.0 - feeRate)) / 0.01), - expectedBase: fixedpoint.NewFromFloat(-0.01), - expectedQuote: fixedpoint.NewFromFloat(0 + 1000.0*0.01*(1.0-feeRate)), - expectedProfit: fixedpoint.NewFromFloat(0.0), - }, - { - name: "long", - trades: []types.Trade{ - { - Side: types.SideTypeBuy, - Price: 1000.0, - Quantity: 0.01, - 
QuoteQuantity: 1000.0 * 0.01, - }, - { - Side: types.SideTypeBuy, - Price: 2000.0, - Quantity: 0.03, - QuoteQuantity: 2000.0 * 0.03, - }, - }, - expectedAverageCost: fixedpoint.NewFromFloat((1000.0*0.01 + 2000.0*0.03) / 0.04), - expectedBase: fixedpoint.NewFromFloat(0.01 + 0.03), - expectedQuote: fixedpoint.NewFromFloat(0 - 1000.0*0.01 - 2000.0*0.03), - expectedProfit: fixedpoint.NewFromFloat(0.0), - }, - - { - name: "long and sell", - trades: []types.Trade{ - { - Side: types.SideTypeBuy, - Price: 1000.0, - Quantity: 0.01, - QuoteQuantity: 1000.0 * 0.01, - }, - { - Side: types.SideTypeBuy, - Price: 2000.0, - Quantity: 0.03, - QuoteQuantity: 2000.0 * 0.03, - }, - { - Side: types.SideTypeSell, - Price: 3000.0, - Quantity: 0.01, - QuoteQuantity: 3000.0 * 0.01, - }, - }, - expectedAverageCost: fixedpoint.NewFromFloat((1000.0*0.01 + 2000.0*0.03) / 0.04), - expectedBase: fixedpoint.NewFromFloat(0.03), - expectedQuote: fixedpoint.NewFromFloat(0 - 1000.0*0.01 - 2000.0*0.03 + 3000.0*0.01), - expectedProfit: fixedpoint.NewFromFloat((3000.0 - (1000.0*0.01+2000.0*0.03)/0.04) * 0.01), - }, - - { - name: "long and sell to short", - trades: []types.Trade{ - { - Side: types.SideTypeBuy, - Price: 1000.0, - Quantity: 0.01, - QuoteQuantity: 1000.0 * 0.01, - }, - { - Side: types.SideTypeBuy, - Price: 2000.0, - Quantity: 0.03, - QuoteQuantity: 2000.0 * 0.03, - }, - { - Side: types.SideTypeSell, - Price: 3000.0, - Quantity: 0.10, - QuoteQuantity: 3000.0 * 0.10, - }, - }, - - expectedAverageCost: fixedpoint.NewFromFloat(3000.0), - expectedBase: fixedpoint.NewFromFloat(-0.06), - expectedQuote: fixedpoint.NewFromFloat(-1000.0*0.01 - 2000.0*0.03 + 3000.0*0.1), - expectedProfit: fixedpoint.NewFromFloat((3000.0 - (1000.0*0.01+2000.0*0.03)/0.04) * 0.04), - }, - - { - name: "short", - trades: []types.Trade{ - { - Side: types.SideTypeSell, - Price: 2000.0, - Quantity: 0.01, - QuoteQuantity: 2000.0 * 0.01, - }, - { - Side: types.SideTypeSell, - Price: 3000.0, - Quantity: 0.03, - QuoteQuantity: 3000.0 * 0.03, - }, - }, - - expectedAverageCost: fixedpoint.NewFromFloat((2000.0*0.01 + 3000.0*0.03) / (0.01 + 0.03)), - expectedBase: fixedpoint.NewFromFloat(0 - 0.01 - 0.03), - expectedQuote: fixedpoint.NewFromFloat(2000.0*0.01 + 3000.0*0.03), - expectedProfit: fixedpoint.NewFromFloat(0.0), - }, - } - - for _, testcase := range testcases { - t.Run(testcase.name, func(t *testing.T) { - pos := Position{ - Symbol: "BTCUSDT", - BaseCurrency: "BTC", - QuoteCurrency: "USDT", - } - profitAmount, profit := pos.AddTrades(testcase.trades) - - assert.Equal(t, testcase.expectedQuote, pos.Quote, "expectedQuote") - assert.Equal(t, testcase.expectedBase, pos.Base, "expectedBase") - assert.Equal(t, testcase.expectedAverageCost, pos.AverageCost, "expectedAverageCost") - if profit { - assert.Equal(t, testcase.expectedProfit, profitAmount, "expectedProfit") - } - }) - } -} diff --git a/pkg/bbgo/profitstats.go b/pkg/bbgo/profitstats.go new file mode 100644 index 0000000000..f30d11b655 --- /dev/null +++ b/pkg/bbgo/profitstats.go @@ -0,0 +1 @@ +package bbgo diff --git a/pkg/bbgo/quantity_amount.go b/pkg/bbgo/quantity_amount.go new file mode 100644 index 0000000000..58b238acae --- /dev/null +++ b/pkg/bbgo/quantity_amount.go @@ -0,0 +1,40 @@ +package bbgo + +import ( + "errors" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +// QuantityOrAmount is a setting structure used for quantity/amount settings +// You can embed this struct into your strategy to share the setting methods +type QuantityOrAmount struct { + // Quantity is the base order quantity for 
your buy/sell order.
+	// when quantity is set, the amount option will not be used.
+	Quantity fixedpoint.Value `json:"quantity"`
+
+	// Amount is the order quote amount for your buy/sell order.
+	Amount fixedpoint.Value `json:"amount,omitempty"`
+}
+
+func (qa *QuantityOrAmount) IsSet() bool {
+	return qa.Quantity.Sign() > 0 || qa.Amount.Sign() > 0
+}
+
+func (qa *QuantityOrAmount) Validate() error {
+	if qa.Quantity.IsZero() && qa.Amount.IsZero() {
+		return errors.New("either quantity or amount can not be empty")
+	}
+	return nil
+}
+
+// CalculateQuantity calculates the equivalent quantity at the given price when amount is set;
+// it returns the quantity directly if the quantity is set
+func (qa *QuantityOrAmount) CalculateQuantity(currentPrice fixedpoint.Value) fixedpoint.Value {
+	if qa.Amount.Sign() > 0 {
+		quantity := qa.Amount.Div(currentPrice)
+		return quantity
+	}
+
+	return qa.Quantity
+}
diff --git a/pkg/bbgo/quota.go b/pkg/bbgo/quota.go
index 377774da4f..e4773d9af9 100644
--- a/pkg/bbgo/quota.go
+++ b/pkg/bbgo/quota.go
@@ -14,18 +14,18 @@ type Quota struct {
 
 func (q *Quota) Add(fund fixedpoint.Value) {
 	q.mu.Lock()
-	q.Available += fund
+	q.Available = q.Available.Add(fund)
 	q.mu.Unlock()
 }
 
 func (q *Quota) Lock(fund fixedpoint.Value) bool {
-	if fund > q.Available {
+	if fund.Compare(q.Available) > 0 {
 		return false
 	}
 
 	q.mu.Lock()
-	q.Available -= fund
-	q.Locked += fund
+	q.Available = q.Available.Sub(fund)
+	q.Locked = q.Locked.Add(fund)
 	q.mu.Unlock()
 
 	return true
@@ -33,14 +33,14 @@ func (q *Quota) Lock(fund fixedpoint.Value) bool {
 
 func (q *Quota) Commit() {
 	q.mu.Lock()
-	q.Locked = 0
+	q.Locked = fixedpoint.Zero
 	q.mu.Unlock()
 }
 
 func (q *Quota) Rollback() {
 	q.mu.Lock()
-	q.Available += q.Locked
-	q.Locked = 0
+	q.Available = q.Available.Add(q.Locked)
+	q.Locked = fixedpoint.Zero
 	q.mu.Unlock()
 }
 
diff --git a/pkg/bbgo/redis_persistence.go b/pkg/bbgo/redis_persistence.go
deleted file mode 100644
index 53f80d4fd1..0000000000
--- a/pkg/bbgo/redis_persistence.go
+++ /dev/null
@@ -1,209 +0,0 @@
-package bbgo
-
-import (
-	"context"
-	"encoding/json"
-	"io/ioutil"
-	"net"
-	"os"
-	"path/filepath"
-	"reflect"
-	"strings"
-
-	"github.com/go-redis/redis/v8"
-	"github.com/pkg/errors"
-)
-
-var ErrPersistenceNotExists = errors.New("persistent data does not exists")
-
-type PersistenceService interface {
-	NewStore(id string, subIDs ...string) Store
-}
-
-type Store interface {
-	Load(val interface{}) error
-	Save(val interface{}) error
-	Reset() error
-}
-
-type MemoryService struct {
-	Slots map[string]interface{}
-}
-
-func NewMemoryService() *MemoryService {
-	return &MemoryService{
-		Slots: make(map[string]interface{}),
-	}
-}
-
-func (s *MemoryService) NewStore(id string, subIDs ...string) Store {
-	key := strings.Join(append([]string{id}, subIDs...), ":")
-	return &MemoryStore{
-		Key:    key,
-		memory: s,
-	}
-}
-
-type MemoryStore struct {
-	Key    string
-	memory *MemoryService
-}
-
-func (store *MemoryStore) Save(val interface{}) error {
-	store.memory.Slots[store.Key] = val
-	return nil
-}
-
-func (store *MemoryStore) Load(val interface{}) error {
-	v := reflect.ValueOf(val)
-	if data, ok := store.memory.Slots[store.Key]; ok {
-		v.Elem().Set(reflect.ValueOf(data).Elem())
-	} else {
-		return ErrPersistenceNotExists
-	}
-
-	return nil
-}
-
-func (store *MemoryStore) Reset() error {
-	delete(store.memory.Slots, store.Key)
-	return nil
-}
-
-type JsonPersistenceService struct {
-	Directory string
-}
-
-func (s *JsonPersistenceService) NewStore(id string, subIDs ...string) Store {
-	return 
&JsonStore{ - ID: id, - Directory: filepath.Join(append([]string{s.Directory}, subIDs...)...), - } -} - -type JsonStore struct { - ID string - Directory string -} - -func (store JsonStore) Reset() error { - if _, err := os.Stat(store.Directory); os.IsNotExist(err) { - return nil - } - - p := filepath.Join(store.Directory, store.ID) + ".json" - if _, err := os.Stat(p); os.IsNotExist(err) { - return nil - } - - return os.Remove(p) -} - -func (store JsonStore) Load(val interface{}) error { - if _, err := os.Stat(store.Directory); os.IsNotExist(err) { - if err2 := os.Mkdir(store.Directory, 0777); err2 != nil { - return err2 - } - } - - p := filepath.Join(store.Directory, store.ID) + ".json" - - if _, err := os.Stat(p); os.IsNotExist(err) { - return ErrPersistenceNotExists - } - - data, err := ioutil.ReadFile(p) - if err != nil { - return err - } - - if len(data) == 0 { - return ErrPersistenceNotExists - } - - return json.Unmarshal(data, val) -} - -func (store JsonStore) Save(val interface{}) error { - if _, err := os.Stat(store.Directory); os.IsNotExist(err) { - if err2 := os.Mkdir(store.Directory, 0777); err2 != nil { - return err2 - } - } - - data, err := json.Marshal(val) - if err != nil { - return err - } - - p := filepath.Join(store.Directory, store.ID) + ".json" - return ioutil.WriteFile(p, data, 0666) -} - -type RedisPersistenceService struct { - redis *redis.Client -} - -func NewRedisPersistenceService(config *RedisPersistenceConfig) *RedisPersistenceService { - client := redis.NewClient(&redis.Options{ - Addr: net.JoinHostPort(config.Host, config.Port), - // Username: "", // username is only for redis 6.0 - Password: config.Password, // no password set - DB: config.DB, // use default DB - }) - - return &RedisPersistenceService{ - redis: client, - } -} - -func (s *RedisPersistenceService) NewStore(id string, subIDs ...string) Store { - if len(subIDs) > 0 { - id += ":" + strings.Join(subIDs, ":") - } - - return &RedisStore{ - redis: s.redis, - ID: id, - } -} - -type RedisStore struct { - redis *redis.Client - - ID string -} - -func (store *RedisStore) Load(val interface{}) error { - cmd := store.redis.Get(context.Background(), store.ID) - data, err := cmd.Result() - if err != nil { - if err == redis.Nil { - return ErrPersistenceNotExists - } - - return err - } - - if len(data) == 0 { - return ErrPersistenceNotExists - } - - return json.Unmarshal([]byte(data), val) -} - -func (store *RedisStore) Save(val interface{}) error { - data, err := json.Marshal(val) - if err != nil { - return err - } - - cmd := store.redis.Set(context.Background(), store.ID, data, 0) - _, err = cmd.Result() - return err -} - -func (store *RedisStore) Reset() error { - _, err := store.redis.Del(context.Background(), store.ID).Result() - return err -} diff --git a/pkg/bbgo/reflect.go b/pkg/bbgo/reflect.go new file mode 100644 index 0000000000..4f72e29ed6 --- /dev/null +++ b/pkg/bbgo/reflect.go @@ -0,0 +1,102 @@ +package bbgo + +import ( + "errors" + "fmt" + "reflect" +) + +type InstanceIDProvider interface { + InstanceID() string +} + +func callID(obj interface{}) string { + sv := reflect.ValueOf(obj) + st := reflect.TypeOf(obj) + if st.Implements(reflect.TypeOf((*InstanceIDProvider)(nil)).Elem()) { + m := sv.MethodByName("InstanceID") + ret := m.Call(nil) + return ret[0].String() + } + return "" +} + +func isSymbolBasedStrategy(rs reflect.Value) (string, bool) { + if rs.Kind() == reflect.Ptr { + rs = rs.Elem() + } + + field := rs.FieldByName("Symbol") + if !field.IsValid() { + return "", false + } + + if 
field.Kind() != reflect.String { + return "", false + } + + return field.String(), true +} + +func hasField(rs reflect.Value, fieldName string) (field reflect.Value, ok bool) { + field = rs.FieldByName(fieldName) + return field, field.IsValid() +} + +type StructFieldIterator func(tag string, ft reflect.StructField, fv reflect.Value) error + +var errCanNotIterateNilPointer = errors.New("can not iterate struct on a nil pointer") + +func iterateFieldsByTag(obj interface{}, tagName string, cb StructFieldIterator) error { + sv := reflect.ValueOf(obj) + st := reflect.TypeOf(obj) + + if st.Kind() != reflect.Ptr { + return fmt.Errorf("f should be a pointer of a struct, %s given", st) + } + + // for pointer, check if it's nil + if sv.IsNil() { + return errCanNotIterateNilPointer + } + + // solve the reference + st = st.Elem() + sv = sv.Elem() + + if st.Kind() != reflect.Struct { + return fmt.Errorf("f should be a struct, %s given", st) + } + + for i := 0; i < sv.NumField(); i++ { + fv := sv.Field(i) + ft := st.Field(i) + + // skip unexported fields + if !st.Field(i).IsExported() { + continue + } + + tag, ok := ft.Tag.Lookup(tagName) + if !ok { + continue + } + + if err := cb(tag, ft, fv); err != nil { + return err + } + } + + return nil +} + +// https://github.com/xiaojun207/go-base-utils/blob/master/utils/Clone.go +func newTypeValueInterface(typ reflect.Type) interface{} { + if typ.Kind() == reflect.Ptr { + typ = typ.Elem() + dst := reflect.New(typ).Elem() + return dst.Addr().Interface() + } + dst := reflect.New(typ) + return dst.Interface() +} diff --git a/pkg/bbgo/reporter.go b/pkg/bbgo/reporter.go index bd6018095d..e8bf85a442 100644 --- a/pkg/bbgo/reporter.go +++ b/pkg/bbgo/reporter.go @@ -4,8 +4,6 @@ import ( "regexp" "github.com/robfig/cron/v3" - - "github.com/c9s/bbgo/pkg/accounting/pnl" ) type PnLReporter interface { @@ -67,17 +65,20 @@ func (reporter *AverageCostPnLReporter) When(specs ...string) *AverageCostPnLRep } func (reporter *AverageCostPnLReporter) Run() { - for _, sessionName := range reporter.Sessions { - session := reporter.environment.sessions[sessionName] - calculator := &pnl.AverageCostCalculator{ - TradingFeeCurrency: session.Exchange.PlatformFeeCurrency(), - } - - for _, symbol := range reporter.Symbols { - report := calculator.Calculate(symbol, session.Trades[symbol].Copy(), session.lastPrices[symbol]) - report.Print() + // FIXME: this is causing cyclic import + /* + for _, sessionName := range reporter.Sessions { + session := reporter.environment.sessions[sessionName] + calculator := &pnl.AverageCostCalculator{ + TradingFeeCurrency: session.Exchange.PlatformFeeCurrency(), + } + + for _, symbol := range reporter.Symbols { + report := calculator.Calculate(symbol, session.Trades[symbol].Copy(), session.lastPrices[symbol]) + report.Print() + } } - } + */ } type PatternChannelRouter struct { @@ -147,6 +148,4 @@ type TradeReporter struct { *Notifiability } -const TemplateTradeReport = `:handshake: {{ .Symbol }} {{ .Side }} Trade Execution @ {{ .Price }}` - const TemplateOrderReport = `:handshake: {{ .Symbol }} {{ .Side }} Order Update @ {{ .Price }}` diff --git a/pkg/bbgo/risk_controls.go b/pkg/bbgo/risk_controls.go index 266c0ca012..121a6fc030 100644 --- a/pkg/bbgo/risk_controls.go +++ b/pkg/bbgo/risk_controls.go @@ -32,7 +32,7 @@ func (e *RiskControlOrderExecutor) SubmitOrders(ctx context.Context, orders ...t } } - formattedOrders, err := formatOrders(e.Session, orders) + formattedOrders, err := e.Session.FormatOrders(orders) if err != nil { return retOrders, err } diff --git 
a/pkg/bbgo/scale.go b/pkg/bbgo/scale.go new file mode 100644 index 0000000000..ec4064b3e1 --- /dev/null +++ b/pkg/bbgo/scale.go @@ -0,0 +1,396 @@ +package bbgo + +import ( + "fmt" + "math" + + "github.com/pkg/errors" +) + +type Scale interface { + Solve() error + Formula() string + FormulaOf(x float64) string + Call(x float64) (y float64) +} + +func init() { + _ = Scale(&ExponentialScale{}) + _ = Scale(&LogarithmicScale{}) + _ = Scale(&LinearScale{}) + _ = Scale(&QuadraticScale{}) +} + +// y := ab^x +// shift xs[0] to 0 (x - h) +// a = y1 +// +// y := ab^(x-h) +// y2/a = b^(x2-h) +// y2/y1 = b^(x2-h) +// +// also posted at https://play.golang.org/p/JlWlwZjoebE +type ExponentialScale struct { + Domain [2]float64 `json:"domain"` + Range [2]float64 `json:"range"` + + a float64 + b float64 + h float64 + s float64 +} + +func (s *ExponentialScale) Solve() error { + if s.Domain[0] > s.Domain[1] { + return errors.New("domain[0] can not greater than domain[1]") + } + + if s.Range[0] == 0 { + return errors.New("for ExponentialScale, range can not start from 0") + } + + s.h = s.Domain[0] + s.a = s.Range[0] + s.b = math.Pow(s.Range[1]/s.Range[0], 1/(s.Domain[1]-s.h)) + s.s = s.Domain[1] - s.h + return nil +} + +func (s *ExponentialScale) String() string { + return s.Formula() +} + +func (s *ExponentialScale) Formula() string { + return fmt.Sprintf("f(x) = %f * %f ^ (x - %f)", s.a, s.b, s.h) +} + +func (s *ExponentialScale) FormulaOf(x float64) string { + return fmt.Sprintf("f(%f) = %f * %f ^ (%f - %f)", x, s.a, s.b, x, s.h) +} + +func (s *ExponentialScale) Call(x float64) (y float64) { + if x < s.Domain[0] { + x = s.Domain[0] + } else if x > s.Domain[1] { + x = s.Domain[1] + } + + y = s.a * math.Pow(s.Range[1]/s.Range[0], (x-s.h)/s.s) + return y +} + +type LogarithmicScale struct { + Domain [2]float64 `json:"domain"` + Range [2]float64 `json:"range"` + + h float64 + s float64 + a float64 +} + +func (s *LogarithmicScale) Call(x float64) (y float64) { + if x < s.Domain[0] { + x = s.Domain[0] + } else if x > s.Domain[1] { + x = s.Domain[1] + } + + // y = a * log(x - h) + s + y = s.a*math.Log(x-s.h) + s.s + return y +} + +func (s *LogarithmicScale) String() string { + return s.Formula() +} + +func (s *LogarithmicScale) Formula() string { + return fmt.Sprintf("f(x) = %f * log(x - %f) + %f", s.a, s.h, s.s) +} + +func (s *LogarithmicScale) FormulaOf(x float64) string { + return fmt.Sprintf("f(%f) = %f * log(%f - %f) + %f", x, s.a, x, s.h, s.s) +} + +func (s *LogarithmicScale) Solve() error { + // f(x) = a * log2(x - h) + s + // + // log2(1) = 0 + // + // h = x1 - 1 + // s = y1 + // + // y2 = a * log(x2 - h) + s + // y2 = a * log(x2 - h) + y1 + // y2 - y1 = a * log(x2 - h) + // a = (y2 - y1) / log(x2 - h) + s.h = s.Domain[0] - 1 + s.s = s.Range[0] + s.a = (s.Range[1] - s.Range[0]) / math.Log(s.Domain[1]-s.h) + return nil +} + +type LinearScale struct { + Domain [2]float64 `json:"domain"` + Range [2]float64 `json:"range"` + + a, b float64 +} + +func (s *LinearScale) Solve() error { + xs := s.Domain + ys := s.Range + // y1 = a * x1 + b + // y2 = a * x2 + b + // y2 - y1 = (a * x2 + b) - (a * x1 + b) + // y2 - y1 = (a * x2) - (a * x1) + // y2 - y1 = a * (x2 - x1) + + // a = (y2 - y1) / (x2 - x1) + // b = y1 - (a * x1) + s.a = (ys[1] - ys[0]) / (xs[1] - xs[0]) + s.b = ys[0] - (s.a * xs[0]) + return nil +} + +func (s *LinearScale) Call(x float64) (y float64) { + if x < s.Domain[0] { + x = s.Domain[0] + } else if x > s.Domain[1] { + x = s.Domain[1] + } + + y = s.a*x + s.b + return y +} + +func (s *LinearScale) 
String() string { + return s.Formula() +} + +func (s *LinearScale) Formula() string { + return fmt.Sprintf("f(x) = %f * x + %f", s.a, s.b) +} + +func (s *LinearScale) FormulaOf(x float64) string { + return fmt.Sprintf("f(%f) = %f * %f + %f", x, s.a, x, s.b) +} + +// see also: http://www.vb-helper.com/howto_find_quadratic_curve.html +type QuadraticScale struct { + Domain [3]float64 `json:"domain"` + Range [3]float64 `json:"range"` + + a, b, c float64 +} + +func (s *QuadraticScale) Solve() error { + xs := s.Domain + ys := s.Range + s.a = ((ys[1]-ys[0])*(xs[0]-xs[2]) + (ys[2]-ys[0])*(xs[1]-xs[0])) / + ((xs[0]-xs[2])*(math.Pow(xs[1], 2)-math.Pow(xs[0], 2)) + (xs[1]-xs[0])*(math.Pow(xs[2], 2)-math.Pow(xs[0], 2))) + + s.b = ((ys[1] - ys[0]) - s.a*(math.Pow(xs[1], 2)-math.Pow(xs[0], 2))) / (xs[1] - xs[0]) + s.c = ys[1] - s.a*math.Pow(xs[1], 2) - s.b*xs[1] + return nil +} + +func (s *QuadraticScale) Call(x float64) (y float64) { + if x < s.Domain[0] { + x = s.Domain[0] + } else if x > s.Domain[2] { + x = s.Domain[2] + } + + // y = a * log(x - h) + s + y = s.a*math.Pow(x, 2) + s.b*x + s.c + return y +} + +func (s *QuadraticScale) String() string { + return s.Formula() +} + +func (s *QuadraticScale) Formula() string { + return fmt.Sprintf("f(x) = %f * x ^ 2 + %f * x + %f", s.a, s.b, s.c) +} + +func (s *QuadraticScale) FormulaOf(x float64) string { + return fmt.Sprintf("f(%f) = %f * %f ^ 2 + %f * %f + %f", x, s.a, x, s.b, x, s.c) +} + +type SlideRule struct { + // Scale type could be one of "log", "exp", "linear", "quadratic" + // this is similar to the d3.scale + LinearScale *LinearScale `json:"linear"` + LogScale *LogarithmicScale `json:"log"` + ExpScale *ExponentialScale `json:"exp"` + QuadraticScale *QuadraticScale `json:"quadratic"` +} + +func (rule *SlideRule) Range() ([2]float64, error) { + if rule.LogScale != nil { + return rule.LogScale.Range, nil + } + + if rule.ExpScale != nil { + return rule.ExpScale.Range, nil + } + + if rule.LinearScale != nil { + return rule.LinearScale.Range, nil + } + + if rule.QuadraticScale != nil { + r := rule.QuadraticScale.Range + return [2]float64{r[0], r[len(r)-1]}, nil + } + + return [2]float64{}, errors.New("no any scale domain is defined") +} + +func (rule *SlideRule) Scale() (Scale, error) { + if rule.LogScale != nil { + return rule.LogScale, nil + } + + if rule.ExpScale != nil { + return rule.ExpScale, nil + } + + if rule.LinearScale != nil { + return rule.LinearScale, nil + } + + if rule.QuadraticScale != nil { + return rule.QuadraticScale, nil + } + + return nil, errors.New("no any scale is defined") +} + +// LayerScale defines the scale DSL for maker layers, e.g., +// +// quantityScale: +// byLayer: +// exp: +// domain: [1, 5] +// range: [0.01, 1.0] +// +// and +// +// quantityScale: +// byLayer: +// linear: +// domain: [1, 3] +// range: [0.01, 1.0] +type LayerScale struct { + LayerRule *SlideRule `json:"byLayer"` +} + +func (s *LayerScale) Scale(layer int) (quantity float64, err error) { + if s.LayerRule == nil { + err = errors.New("either price or volume scale is not defined") + return + } + + scale, err := s.LayerRule.Scale() + if err != nil { + return 0, err + } + + if err := scale.Solve(); err != nil { + return 0, err + } + + return scale.Call(float64(layer)), nil +} + +// PriceVolumeScale defines the scale DSL for strategy, e.g., +// +// quantityScale: +// byPrice: +// exp: +// domain: [10_000, 50_000] +// range: [0.01, 1.0] +// +// and +// +// quantityScale: +// byVolume: +// linear: +// domain: [10_000, 50_000] +// range: [0.01, 1.0] +type 
PriceVolumeScale struct { + ByPriceRule *SlideRule `json:"byPrice"` + ByVolumeRule *SlideRule `json:"byVolume"` +} + +func (s *PriceVolumeScale) Scale(price float64, volume float64) (quantity float64, err error) { + if s.ByPriceRule != nil { + quantity, err = s.ScaleByPrice(price) + return + } else if s.ByVolumeRule != nil { + quantity, err = s.ScaleByVolume(volume) + } else { + err = errors.New("either price or volume scale is not defined") + } + return +} + +// ScaleByPrice scale quantity by the given price +func (s *PriceVolumeScale) ScaleByPrice(price float64) (float64, error) { + if s.ByPriceRule == nil { + return 0, errors.New("byPrice scale is not defined") + } + + scale, err := s.ByPriceRule.Scale() + if err != nil { + return 0, err + } + + if err := scale.Solve(); err != nil { + return 0, err + } + + return scale.Call(price), nil +} + +// ScaleByVolume scale quantity by the given volume +func (s *PriceVolumeScale) ScaleByVolume(volume float64) (float64, error) { + if s.ByVolumeRule == nil { + return 0, errors.New("byVolume scale is not defined") + } + + scale, err := s.ByVolumeRule.Scale() + if err != nil { + return 0, err + } + + if err := scale.Solve(); err != nil { + return 0, err + } + + return scale.Call(volume), nil +} + +type PercentageScale struct { + ByPercentage *SlideRule `json:"byPercentage"` +} + +func (s *PercentageScale) Scale(percentage float64) (float64, error) { + if s.ByPercentage == nil { + return 0.0, errors.New("percentage scale is not defined") + } + + scale, err := s.ByPercentage.Scale() + if err != nil { + return 0.0, err + } + + if err := scale.Solve(); err != nil { + return 0.0, err + } + + return scale.Call(percentage), nil +} diff --git a/pkg/bbgo/scale_test.go b/pkg/bbgo/scale_test.go new file mode 100644 index 0000000000..66e8544e36 --- /dev/null +++ b/pkg/bbgo/scale_test.go @@ -0,0 +1,201 @@ +package bbgo + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +const Delta = 1e-9 + +func TestExponentialScale(t *testing.T) { + // graph see: https://www.desmos.com/calculator/ip0ijbcbbf + scale := ExponentialScale{ + Domain: [2]float64{1000, 2000}, + Range: [2]float64{0.001, 0.01}, + } + + err := scale.Solve() + assert.NoError(t, err) + + assert.Equal(t, "f(x) = 0.001000 * 1.002305 ^ (x - 1000.000000)", scale.String()) + assert.InDelta(t, 0.001, scale.Call(1000.0), Delta) + assert.InDelta(t, 0.01, scale.Call(2000.0), Delta) + + for x := 1000; x <= 2000; x += 100 { + y := scale.Call(float64(x)) + t.Logf("%s = %f", scale.FormulaOf(float64(x)), y) + } +} + +func TestExponentialScale_Reverse(t *testing.T) { + scale := ExponentialScale{ + Domain: [2]float64{1000, 2000}, + Range: [2]float64{0.1, 0.001}, + } + + err := scale.Solve() + assert.NoError(t, err) + + assert.Equal(t, "f(x) = 0.100000 * 0.995405 ^ (x - 1000.000000)", scale.String()) + assert.InDelta(t, 0.1, scale.Call(1000.0), Delta) + assert.InDelta(t, 0.001, scale.Call(2000.0), Delta) + + for x := 1000; x <= 2000; x += 100 { + y := scale.Call(float64(x)) + t.Logf("%s = %f", scale.FormulaOf(float64(x)), y) + } +} + +func TestLogScale(t *testing.T) { + // see https://www.desmos.com/calculator/q1ufxx5gry + scale := LogarithmicScale{ + Domain: [2]float64{1000, 2000}, + Range: [2]float64{0.001, 0.01}, + } + + err := scale.Solve() + assert.NoError(t, err) + assert.Equal(t, "f(x) = 0.001303 * log(x - 999.000000) + 0.001000", scale.String()) + assert.InDelta(t, 0.001, scale.Call(1000.0), Delta) + assert.InDelta(t, 0.01, scale.Call(2000.0), Delta) + for x := 1000; x <= 2000; x += 100 { + y := 
scale.Call(float64(x)) + t.Logf("%s = %f", scale.FormulaOf(float64(x)), y) + } +} + +func TestLinearScale(t *testing.T) { + scale := LinearScale{ + Domain: [2]float64{1000, 2000}, + Range: [2]float64{3, 10}, + } + + err := scale.Solve() + assert.NoError(t, err) + assert.Equal(t, "f(x) = 0.007000 * x + -4.000000", scale.String()) + assert.InDelta(t, 3, scale.Call(1000), Delta) + assert.InDelta(t, 10, scale.Call(2000), Delta) + for x := 1000; x <= 2000; x += 100 { + y := scale.Call(float64(x)) + t.Logf("%s = %f", scale.FormulaOf(float64(x)), y) + } +} + +func TestLinearScale2(t *testing.T) { + scale := LinearScale{ + Domain: [2]float64{1, 3}, + Range: [2]float64{0.1, 0.4}, + } + + err := scale.Solve() + assert.NoError(t, err) + assert.Equal(t, "f(x) = 0.150000 * x + -0.050000", scale.String()) + assert.InDelta(t, 0.1, scale.Call(1), Delta) + assert.InDelta(t, 0.4, scale.Call(3), Delta) +} + +func TestQuadraticScale(t *testing.T) { + // see https://www.desmos.com/calculator/vfqntrxzpr + scale := QuadraticScale{ + Domain: [3]float64{0, 100, 200}, + Range: [3]float64{1, 20, 50}, + } + + err := scale.Solve() + assert.NoError(t, err) + assert.Equal(t, "f(x) = 0.000550 * x ^ 2 + 0.135000 * x + 1.000000", scale.String()) + assert.InDelta(t, 1, scale.Call(0), Delta) + assert.InDelta(t, 20, scale.Call(100.0), Delta) + assert.InDelta(t, 50.0, scale.Call(200.0), Delta) + for x := 0; x <= 200; x += 1 { + y := scale.Call(float64(x)) + t.Logf("%s = %f", scale.FormulaOf(float64(x)), y) + } +} + +func TestPercentageScale(t *testing.T) { + t.Run("from 0.0 to 1.0", func(t *testing.T) { + s := &PercentageScale{ + ByPercentage: &SlideRule{ + ExpScale: &ExponentialScale{ + Domain: [2]float64{0.0, 1.0}, + Range: [2]float64{1.0, 100.0}, + }, + }, + } + + v, err := s.Scale(0.0) + assert.NoError(t, err) + assert.InDelta(t, 1.0, v, Delta) + + v, err = s.Scale(1.0) + assert.NoError(t, err) + assert.InDelta(t, 100.0, v, Delta) + }) + + t.Run("from -1.0 to 1.0", func(t *testing.T) { + s := &PercentageScale{ + ByPercentage: &SlideRule{ + ExpScale: &ExponentialScale{ + Domain: [2]float64{-1.0, 1.0}, + Range: [2]float64{10.0, 100.0}, + }, + }, + } + + v, err := s.Scale(-1.0) + assert.NoError(t, err) + assert.InDelta(t, 10.0, v, Delta) + + v, err = s.Scale(1.0) + assert.NoError(t, err) + assert.InDelta(t, 100.0, v, Delta) + }) + + t.Run("reverse -1.0 to 1.0", func(t *testing.T) { + s := &PercentageScale{ + ByPercentage: &SlideRule{ + ExpScale: &ExponentialScale{ + Domain: [2]float64{-1.0, 1.0}, + Range: [2]float64{100.0, 10.0}, + }, + }, + } + + v, err := s.Scale(-1.0) + assert.NoError(t, err) + assert.InDelta(t, 100.0, v, Delta) + + v, err = s.Scale(1.0) + assert.NoError(t, err) + assert.InDelta(t, 10.0, v, Delta) + + v, err = s.Scale(2.0) + assert.NoError(t, err) + assert.InDelta(t, 10.0, v, Delta) + + v, err = s.Scale(-2.0) + assert.NoError(t, err) + assert.InDelta(t, 100.0, v, Delta) + }) + + t.Run("negative range", func(t *testing.T) { + s := &PercentageScale{ + ByPercentage: &SlideRule{ + ExpScale: &ExponentialScale{ + Domain: [2]float64{0.0, 1.0}, + Range: [2]float64{-100.0, 100.0}, + }, + }, + } + + v, err := s.Scale(0.0) + assert.NoError(t, err) + assert.InDelta(t, -100.0, v, Delta) + + v, err = s.Scale(1.0) + assert.NoError(t, err) + assert.InDelta(t, 100.0, v, Delta) + }) +} diff --git a/pkg/bbgo/session.go b/pkg/bbgo/session.go index bdbcc1a961..c6796c8f1c 100644 --- a/pkg/bbgo/session.go +++ b/pkg/bbgo/session.go @@ -1,30 +1,62 @@ package bbgo import ( + "context" "fmt" + "strings" + "sync" + "time" + 
"github.com/slack-go/slack" + + "github.com/prometheus/client_golang/prometheus" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + + "github.com/c9s/bbgo/pkg/cache" + + exchange2 "github.com/c9s/bbgo/pkg/exchange" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/service" "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" ) +var ( + debugEWMA = false + debugSMA = false +) + +func init() { + // when using --dotenv option, the dotenv is loaded from command.PersistentPreRunE, not init. + // hence here the env var won't enable the debug flag + util.SetEnvVarBool("DEBUG_EWMA", &debugEWMA) + util.SetEnvVarBool("DEBUG_SMA", &debugSMA) +} + type StandardIndicatorSet struct { Symbol string // Standard indicators // interval -> window - sma map[types.IntervalWindow]*indicator.SMA - ewma map[types.IntervalWindow]*indicator.EWMA - boll map[types.IntervalWindow]*indicator.BOLL + sma map[types.IntervalWindow]*indicator.SMA + ewma map[types.IntervalWindow]*indicator.EWMA + boll map[types.IntervalWindowBandWidth]*indicator.BOLL + stoch map[types.IntervalWindow]*indicator.STOCH + volatility map[types.IntervalWindow]*indicator.VOLATILITY store *MarketDataStore } func NewStandardIndicatorSet(symbol string, store *MarketDataStore) *StandardIndicatorSet { set := &StandardIndicatorSet{ - Symbol: symbol, - sma: make(map[types.IntervalWindow]*indicator.SMA), - ewma: make(map[types.IntervalWindow]*indicator.EWMA), - boll: make(map[types.IntervalWindow]*indicator.BOLL), - store: store, + Symbol: symbol, + sma: make(map[types.IntervalWindow]*indicator.SMA), + ewma: make(map[types.IntervalWindow]*indicator.EWMA), + boll: make(map[types.IntervalWindowBandWidth]*indicator.BOLL), + stoch: make(map[types.IntervalWindow]*indicator.STOCH), + volatility: make(map[types.IntervalWindow]*indicator.VOLATILITY), + store: store, } // let us pre-defined commonly used intervals @@ -33,30 +65,46 @@ func NewStandardIndicatorSet(symbol string, store *MarketDataStore) *StandardInd iw := types.IntervalWindow{Interval: interval, Window: window} set.sma[iw] = &indicator.SMA{IntervalWindow: iw} set.sma[iw].Bind(store) + if debugSMA { + set.sma[iw].OnUpdate(func(value float64) { + log.Infof("%s SMA %s: %f", symbol, iw.String(), value) + }) + } set.ewma[iw] = &indicator.EWMA{IntervalWindow: iw} set.ewma[iw].Bind(store) + + // if debug EWMA is enabled, we add the debug handler + if debugEWMA { + set.ewma[iw].OnUpdate(func(value float64) { + log.Infof("%s EWMA %s: %f", symbol, iw.String(), value) + }) + } + } // setup boll indicator, we may refactor boll indicator by subscribing SMA indicator, // however, since general used BOLLINGER band use window 21, which is not in the existing SMA indicator sets. 
// Pull out the bandwidth configuration as the boll Key
 		iw := types.IntervalWindow{Interval: interval, Window: 21}
-		set.boll[iw] = &indicator.BOLL{IntervalWindow: iw, K: 2.0}
-		set.boll[iw].Bind(store)
+
+		// set default band width to 2.0
+		iwb := types.IntervalWindowBandWidth{IntervalWindow: iw, BandWidth: 2.0}
+		set.boll[iwb] = &indicator.BOLL{IntervalWindow: iw, K: iwb.BandWidth}
+		set.boll[iwb].Bind(store)
 	}
 
 	return set
 }
 
-// BOLL returns the bollinger band indicator of the given interval and the window,
-// Please note that the K for std dev is fixed and defaults to 2.0
+// BOLL returns the bollinger band indicator of the given interval, the window and bandwidth
 func (set *StandardIndicatorSet) BOLL(iw types.IntervalWindow, bandWidth float64) *indicator.BOLL {
-	inc, ok := set.boll[iw]
+	iwb := types.IntervalWindowBandWidth{IntervalWindow: iw, BandWidth: bandWidth}
+	inc, ok := set.boll[iwb]
 	if !ok {
-		inc := &indicator.BOLL{IntervalWindow: iw, K: bandWidth}
+		inc = &indicator.BOLL{IntervalWindow: iw, K: bandWidth}
 		inc.Bind(set.store)
-		set.boll[iw] = inc
+		set.boll[iwb] = inc
 	}
 
 	return inc
@@ -66,7 +114,7 @@ func (set *StandardIndicatorSet) BOLL(iw types.IntervalWindow, bandWidth float64
 func (set *StandardIndicatorSet) SMA(iw types.IntervalWindow) *indicator.SMA {
 	inc, ok := set.sma[iw]
 	if !ok {
-		inc := &indicator.SMA{IntervalWindow: iw}
+		inc = &indicator.SMA{IntervalWindow: iw}
 		inc.Bind(set.store)
 		set.sma[iw] = inc
 	}
@@ -74,11 +122,11 @@ func (set *StandardIndicatorSet) SMA(iw types.IntervalWindow) *indicator.SMA {
 	return inc
 }
 
-// GetEWMA returns the exponential weighed moving average indicator of the given interval and the window size.
+// EWMA returns the exponentially weighted moving average indicator of the given interval and the window size.
 func (set *StandardIndicatorSet) EWMA(iw types.IntervalWindow) *indicator.EWMA {
 	inc, ok := set.ewma[iw]
 	if !ok {
-		inc := &indicator.EWMA{IntervalWindow: iw}
+		inc = &indicator.EWMA{IntervalWindow: iw}
 		inc.Bind(set.store)
 		set.ewma[iw] = inc
 	}
@@ -86,82 +134,448 @@ func (set *StandardIndicatorSet) EWMA(iw types.IntervalWindow) *indicator.EWMA {
 	return inc
 }
 
+func (set *StandardIndicatorSet) STOCH(iw types.IntervalWindow) *indicator.STOCH {
+	inc, ok := set.stoch[iw]
+	if !ok {
+		inc = &indicator.STOCH{IntervalWindow: iw}
+		inc.Bind(set.store)
+		set.stoch[iw] = inc
+	}
+
+	return inc
+}
+
+// VOLATILITY returns the volatility(stddev) indicator of the given interval and the window size.
+func (set *StandardIndicatorSet) VOLATILITY(iw types.IntervalWindow) *indicator.VOLATILITY {
+	inc, ok := set.volatility[iw]
+	if !ok {
+		inc = &indicator.VOLATILITY{IntervalWindow: iw}
+		inc.Bind(set.store)
+		set.volatility[iw] = inc
+	}
+
+	return inc
+}
+
 // ExchangeSession presents the exchange connection Session
 // It also maintains and collects the data returned from the stream.
type ExchangeSession struct { // exchange Session based notification system // we make it as a value field so that we can configure it separately - Notifiability + Notifiability `json:"-" yaml:"-"` + + // --------------------------- + // Session config fields + // --------------------------- // Exchange Session name - Name string + Name string `json:"name,omitempty" yaml:"name,omitempty"` + ExchangeName types.ExchangeName `json:"exchange" yaml:"exchange"` + EnvVarPrefix string `json:"envVarPrefix" yaml:"envVarPrefix"` + Key string `json:"key,omitempty" yaml:"key,omitempty"` + Secret string `json:"secret,omitempty" yaml:"secret,omitempty"` + Passphrase string `json:"passphrase,omitempty" yaml:"passphrase,omitempty"` + SubAccount string `json:"subAccount,omitempty" yaml:"subAccount,omitempty"` + + // Withdrawal is used for enabling withdrawal functions + Withdrawal bool `json:"withdrawal,omitempty" yaml:"withdrawal,omitempty"` + MakerFeeRate fixedpoint.Value `json:"makerFeeRate" yaml:"makerFeeRate"` + TakerFeeRate fixedpoint.Value `json:"takerFeeRate" yaml:"takerFeeRate"` + + PublicOnly bool `json:"publicOnly,omitempty" yaml:"publicOnly"` + Margin bool `json:"margin,omitempty" yaml:"margin"` + IsolatedMargin bool `json:"isolatedMargin,omitempty" yaml:"isolatedMargin,omitempty"` + IsolatedMarginSymbol string `json:"isolatedMarginSymbol,omitempty" yaml:"isolatedMarginSymbol,omitempty"` + + Futures bool `json:"futures,omitempty" yaml:"futures"` + IsolatedFutures bool `json:"isolatedFutures,omitempty" yaml:"isolatedFutures,omitempty"` + IsolatedFuturesSymbol string `json:"isolatedFuturesSymbol,omitempty" yaml:"isolatedFuturesSymbol,omitempty"` + + // --------------------------- + // Runtime fields + // --------------------------- // The exchange account states - Account *types.Account + Account *types.Account `json:"-" yaml:"-"` + accountMutex sync.Mutex - // Stream is the connection stream of the exchange - Stream types.Stream + IsInitialized bool `json:"-" yaml:"-"` - Subscriptions map[types.Subscription]types.Subscription + OrderExecutor *ExchangeOrderExecutor `json:"orderExecutor,omitempty" yaml:"orderExecutor,omitempty"` - Exchange types.Exchange + // UserDataStream is the connection stream of the exchange + UserDataStream types.Stream `json:"-" yaml:"-"` + MarketDataStream types.Stream `json:"-" yaml:"-"` - // markets defines market configuration of a symbol - markets map[string]types.Market + // Subscriptions + // this is a read-only field when running strategy + Subscriptions map[types.Subscription]types.Subscription `json:"-" yaml:"-"` - // startPrices is used for backtest - startPrices map[string]float64 + Exchange types.Exchange `json:"-" yaml:"-"` - lastPrices map[string]float64 + UseHeikinAshi bool `json:"heikinAshi,omitempty" yaml:"heikinAshi,omitempty"` // Trades collects the executed trades from the exchange // map: symbol -> []trade - Trades map[string]*types.TradeSlice + Trades map[string]*types.TradeSlice `json:"-" yaml:"-"` + + // markets defines market configuration of a symbol + markets map[string]types.Market + + // orderBooks stores the streaming order book + orderBooks map[string]*types.StreamOrderBook + + // startPrices is used for backtest + startPrices map[string]fixedpoint.Value + + lastPrices map[string]fixedpoint.Value + lastPriceUpdatedAt time.Time // marketDataStores contains the market data store of each market marketDataStores map[string]*MarketDataStore - positions map[string]*Position + positions map[string]*types.Position // standard indicators of each 
market standardIndicatorSets map[string]*StandardIndicatorSet orderStores map[string]*OrderStore - loadedSymbols map[string]struct{} + usedSymbols map[string]struct{} + initializedSymbols map[string]struct{} - IsMargin bool - - IsIsolatedMargin bool - - IsolatedMarginSymbol string + logger *log.Entry } func NewExchangeSession(name string, exchange types.Exchange) *ExchangeSession { - return &ExchangeSession{ + userDataStream := exchange.NewStream() + marketDataStream := exchange.NewStream() + marketDataStream.SetPublicOnly() + + session := &ExchangeSession{ Notifiability: Notifiability{ SymbolChannelRouter: NewPatternChannelRouter(nil), SessionChannelRouter: NewPatternChannelRouter(nil), ObjectChannelRouter: NewObjectChannelRouter(), }, - Name: name, - Exchange: exchange, - Stream: exchange.NewStream(), - Subscriptions: make(map[types.Subscription]types.Subscription), - Account: &types.Account{}, - Trades: make(map[string]*types.TradeSlice), + Name: name, + Exchange: exchange, + UserDataStream: userDataStream, + MarketDataStream: marketDataStream, + Subscriptions: make(map[types.Subscription]types.Subscription), + Account: &types.Account{}, + Trades: make(map[string]*types.TradeSlice), + orderBooks: make(map[string]*types.StreamOrderBook), markets: make(map[string]types.Market), - startPrices: make(map[string]float64), - lastPrices: make(map[string]float64), - positions: make(map[string]*Position), + startPrices: make(map[string]fixedpoint.Value), + lastPrices: make(map[string]fixedpoint.Value), + positions: make(map[string]*types.Position), marketDataStores: make(map[string]*MarketDataStore), standardIndicatorSets: make(map[string]*StandardIndicatorSet), orderStores: make(map[string]*OrderStore), + usedSymbols: make(map[string]struct{}), + initializedSymbols: make(map[string]struct{}), + logger: log.WithField("session", name), + } + + session.OrderExecutor = &ExchangeOrderExecutor{ + // copy the notification system so that we can route + Notifiability: session.Notifiability, + Session: session, + } + + return session +} + +func (session *ExchangeSession) GetAccount() (a *types.Account) { + session.accountMutex.Lock() + a = session.Account + session.accountMutex.Unlock() + return a +} + +// UpdateAccount locks the account mutex and update the account object +func (session *ExchangeSession) UpdateAccount(ctx context.Context) (*types.Account, error) { + account, err := session.Exchange.QueryAccount(ctx) + if err != nil { + return nil, err + } + + session.accountMutex.Lock() + session.Account = account + session.accountMutex.Unlock() + return account, nil +} + +// Init initializes the basic data structure and market information by its exchange. +// Note that the subscribed symbols are not loaded in this stage. 
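+// Symbol-level data (trades, positions, order stores, market data stores and indicators) is initialized later by InitSymbols / initSymbol.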
+func (session *ExchangeSession) Init(ctx context.Context, environ *Environment) error { + if session.IsInitialized { + return ErrSessionAlreadyInitialized + } + + var log = log.WithField("session", session.Name) + + // load markets first + + var disableMarketsCache = false + var markets types.MarketMap + var err error + if util.SetEnvVarBool("DISABLE_MARKETS_CACHE", &disableMarketsCache); disableMarketsCache { + markets, err = session.Exchange.QueryMarkets(ctx) + } else { + markets, err = cache.LoadExchangeMarketsWithCache(ctx, session.Exchange) + if err != nil { + return err + } + } + + if len(markets) == 0 { + return fmt.Errorf("market config should not be empty") + } + + session.markets = markets + + if feeRateProvider, ok := session.Exchange.(types.ExchangeDefaultFeeRates); ok { + defaultFeeRates := feeRateProvider.DefaultFeeRates() + if session.MakerFeeRate.IsZero() { + session.MakerFeeRate = defaultFeeRates.MakerFeeRate + } + if session.TakerFeeRate.IsZero() { + session.TakerFeeRate = defaultFeeRates.TakerFeeRate + } + } + + if session.UseHeikinAshi { + session.MarketDataStream = &types.HeikinAshiStream{ + StandardStreamEmitter: session.MarketDataStream.(types.StandardStreamEmitter), + } + } + + // query and initialize the balances + if !session.PublicOnly { + account, err := session.Exchange.QueryAccount(ctx) + if err != nil { + return err + } + + session.accountMutex.Lock() + session.Account = account + session.accountMutex.Unlock() + + log.Infof("%s account", session.Name) + account.Balances().Print() + + // forward trade updates and order updates to the order executor + session.UserDataStream.OnTradeUpdate(session.OrderExecutor.EmitTradeUpdate) + session.UserDataStream.OnOrderUpdate(session.OrderExecutor.EmitOrderUpdate) + + session.UserDataStream.OnBalanceSnapshot(func(balances types.BalanceMap) { + session.accountMutex.Lock() + session.Account.UpdateBalances(balances) + session.accountMutex.Unlock() + }) + + session.UserDataStream.OnBalanceUpdate(func(balances types.BalanceMap) { + session.accountMutex.Lock() + session.Account.UpdateBalances(balances) + session.accountMutex.Unlock() + }) + + session.bindConnectionStatusNotification(session.UserDataStream, "user data") + + // if metrics mode is enabled, we bind the callbacks to update metrics + if viper.GetBool("metrics") { + session.metricsBalancesUpdater(account.Balances()) + session.bindUserDataStreamMetrics(session.UserDataStream) + } + } + + // add trade logger + session.UserDataStream.OnTradeUpdate(func(trade types.Trade) { + log.Info(trade.String()) + }) + + if viper.GetBool("debug-kline") { + session.MarketDataStream.OnKLine(func(kline types.KLine) { + log.WithField("marketData", "kline").Infof("kline: %+v", kline) + }) + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + log.WithField("marketData", "kline").Infof("kline closed: %+v", kline) + }) + } + + // update last prices + if session.UseHeikinAshi { + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + if _, ok := session.startPrices[kline.Symbol]; !ok { + session.startPrices[kline.Symbol] = kline.Open + } + + session.lastPrices[kline.Symbol] = session.MarketDataStream.(*types.HeikinAshiStream).LastOrigin[kline.Symbol][kline.Interval].Close + }) + } else { + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + if _, ok := session.startPrices[kline.Symbol]; !ok { + session.startPrices[kline.Symbol] = kline.Open + } + + session.lastPrices[kline.Symbol] = kline.Close + }) + } + + 
session.MarketDataStream.OnMarketTrade(func(trade types.Trade) { + session.lastPrices[trade.Symbol] = trade.Price + }) + + session.IsInitialized = true + return nil +} + +func (session *ExchangeSession) InitSymbols(ctx context.Context, environ *Environment) error { + if err := session.initUsedSymbols(ctx, environ); err != nil { + return err + } + + return nil +} + +// initUsedSymbols uses usedSymbols to initialize the related data structure +func (session *ExchangeSession) initUsedSymbols(ctx context.Context, environ *Environment) error { + for symbol := range session.usedSymbols { + if err := session.initSymbol(ctx, environ, symbol); err != nil { + return err + } + } + + return nil +} + +// initSymbol loads trades for the symbol, bind stream callbacks, init positions, market data store. +// please note, initSymbol can not be called for the same symbol for twice +func (session *ExchangeSession) initSymbol(ctx context.Context, environ *Environment, symbol string) error { + if _, ok := session.initializedSymbols[symbol]; ok { + // return fmt.Errorf("symbol %s is already initialized", symbol) + return nil + } + + market, ok := session.markets[symbol] + if !ok { + return fmt.Errorf("market %s is not defined", symbol) + } + + var err error + var trades []types.Trade + if environ.SyncService != nil && environ.BacktestService == nil { + tradingFeeCurrency := session.Exchange.PlatformFeeCurrency() + if strings.HasPrefix(symbol, tradingFeeCurrency) { + trades, err = environ.TradeService.QueryForTradingFeeCurrency(session.Exchange.Name(), symbol, tradingFeeCurrency) + } else { + trades, err = environ.TradeService.Query(service.QueryTradesOptions{ + Exchange: session.Exchange.Name(), + Symbol: symbol, + Ordering: "DESC", + Limit: 100, + }) + } + + if err != nil { + return err + } + + trades = types.SortTradesAscending(trades) + log.Infof("symbol %s: %d trades loaded", symbol, len(trades)) + } + + session.Trades[symbol] = &types.TradeSlice{Trades: trades} + session.UserDataStream.OnTradeUpdate(func(trade types.Trade) { + if trade.Symbol == symbol { + session.Trades[symbol].Append(trade) + } + }) + + position := &types.Position{ + Symbol: symbol, + BaseCurrency: market.BaseCurrency, + QuoteCurrency: market.QuoteCurrency, + } + position.AddTrades(trades) + position.BindStream(session.UserDataStream) + session.positions[symbol] = position + + orderStore := NewOrderStore(symbol) + orderStore.AddOrderUpdate = true + + orderStore.BindStream(session.UserDataStream) + session.orderStores[symbol] = orderStore - loadedSymbols: make(map[string]struct{}), + marketDataStore := NewMarketDataStore(symbol) + marketDataStore.BindStream(session.MarketDataStream) + session.marketDataStores[symbol] = marketDataStore + + standardIndicatorSet := NewStandardIndicatorSet(symbol, marketDataStore) + session.standardIndicatorSets[symbol] = standardIndicatorSet + + // used kline intervals by the given symbol + var klineSubscriptions = map[types.Interval]struct{}{} + + // always subscribe the 1m kline so we can make sure the connection persists. + klineSubscriptions[types.Interval1m] = struct{}{} + + // Aggregate the intervals that we are using in the subscriptions. 
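+	// Note: order book subscriptions are bound here for whatever symbol they reference, while kline intervals are collected only when the subscription matches this symbol.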
+ for _, sub := range session.Subscriptions { + switch sub.Channel { + case types.BookChannel: + book := types.NewStreamBook(sub.Symbol) + book.BindStream(session.MarketDataStream) + session.orderBooks[sub.Symbol] = book + + case types.KLineChannel: + if sub.Options.Interval == "" { + continue + } + + if sub.Symbol == symbol { + klineSubscriptions[types.Interval(sub.Options.Interval)] = struct{}{} + } + } + } + + for interval := range klineSubscriptions { + // avoid querying the last unclosed kline + endTime := environ.startTime + kLines, err := session.Exchange.QueryKLines(ctx, symbol, interval, types.KLineQueryOptions{ + EndTime: &endTime, + Limit: 1000, // indicators need at least 100 + }) + if err != nil { + return err + } + + if len(kLines) == 0 { + log.Warnf("no kline data for %s %s (end time <= %s)", symbol, interval, environ.startTime) + continue + } + + // update last prices by the given kline + lastKLine := kLines[len(kLines)-1] + if interval == types.Interval1m { + log.Infof("last kline %+v", lastKLine) + session.lastPrices[symbol] = lastKLine.Close + } + + for _, k := range kLines { + // let market data store trigger the update, so that the indicator could be updated too. + marketDataStore.AddKLine(k) + } } + + log.Infof("%s last price: %v", symbol, session.lastPrices[symbol]) + + session.initializedSymbols[symbol] = struct{}{} + return nil } func (session *ExchangeSession) StandardIndicatorSet(symbol string) (*StandardIndicatorSet, bool) { @@ -169,34 +583,85 @@ func (session *ExchangeSession) StandardIndicatorSet(symbol string) (*StandardIn return set, ok } -func (session *ExchangeSession) Position(symbol string) (pos *Position, ok bool) { +func (session *ExchangeSession) Position(symbol string) (pos *types.Position, ok bool) { pos, ok = session.positions[symbol] + if ok { + return pos, ok + } + + market, ok := session.markets[symbol] + if !ok { + return nil, false + } + + pos = &types.Position{ + Symbol: symbol, + BaseCurrency: market.BaseCurrency, + QuoteCurrency: market.QuoteCurrency, + } + ok = true + session.positions[symbol] = pos return pos, ok } +func (session *ExchangeSession) Positions() map[string]*types.Position { + return session.positions +} + // MarketDataStore returns the market data store of a symbol func (session *ExchangeSession) MarketDataStore(symbol string) (s *MarketDataStore, ok bool) { s, ok = session.marketDataStores[symbol] return s, ok } -func (session *ExchangeSession) StartPrice(symbol string) (price float64, ok bool) { +// OrderBook returns the personal orderbook of a symbol +func (session *ExchangeSession) OrderBook(symbol string) (s *types.StreamOrderBook, ok bool) { + s, ok = session.orderBooks[symbol] + return s, ok +} + +func (session *ExchangeSession) StartPrice(symbol string) (price fixedpoint.Value, ok bool) { price, ok = session.startPrices[symbol] return price, ok } -func (session *ExchangeSession) LastPrice(symbol string) (price float64, ok bool) { +func (session *ExchangeSession) LastPrice(symbol string) (price fixedpoint.Value, ok bool) { price, ok = session.lastPrices[symbol] return price, ok } +func (session *ExchangeSession) AllLastPrices() map[string]fixedpoint.Value { + return session.lastPrices +} + +func (session *ExchangeSession) LastPrices() map[string]fixedpoint.Value { + return session.lastPrices +} + func (session *ExchangeSession) Market(symbol string) (market types.Market, ok bool) { market, ok = session.markets[symbol] return market, ok } +func (session *ExchangeSession) Markets() map[string]types.Market { + return 
session.markets +} + +func (session *ExchangeSession) OrderStore(symbol string) (store *OrderStore, ok bool) { + store, ok = session.orderStores[symbol] + return store, ok +} + +func (session *ExchangeSession) OrderStores() map[string]*OrderStore { + return session.orderStores +} + // Subscribe save the subscription info, later it will be assigned to the stream func (session *ExchangeSession) Subscribe(channel types.Channel, symbol string, options types.SubscribeOptions) *ExchangeSession { + if channel == types.KLineChannel && len(options.Interval) == 0 { + panic("subscription interval for kline can not be empty") + } + sub := types.Subscription{ Channel: channel, Symbol: symbol, @@ -204,7 +669,7 @@ func (session *ExchangeSession) Subscribe(channel types.Channel, symbol string, } // add to the loaded symbol table - session.loadedSymbols[symbol] = struct{}{} + session.usedSymbols[symbol] = struct{}{} session.Subscriptions[sub] = sub return session } @@ -216,23 +681,307 @@ func (session *ExchangeSession) FormatOrder(order types.SubmitOrder) (types.Subm } order.Market = market + return order, nil +} - switch order.Type { - case types.OrderTypeStopMarket, types.OrderTypeStopLimit: - order.StopPriceString = market.FormatPrice(order.StopPrice) +func (session *ExchangeSession) UpdatePrices(ctx context.Context, currencies []string, fiat string) (err error) { + // TODO: move this cache check to the http routes + // if session.lastPriceUpdatedAt.After(time.Now().Add(-time.Hour)) { + // return nil + // } + var symbols []string + for _, c := range currencies { + symbols = append(symbols, c+fiat) // BTC/USDT + symbols = append(symbols, fiat+c) // USDT/TWD } - switch order.Type { - case types.OrderTypeMarket, types.OrderTypeStopMarket: - order.Price = 0.0 - order.PriceString = "" + tickers, err := session.Exchange.QueryTickers(ctx, symbols...) 
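+	// bail out on an API error; an empty ticker response also aborts the update (err is nil in that case)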
+ if err != nil || len(tickers) == 0 { + return err + } - default: - order.PriceString = market.FormatPrice(order.Price) + var lastTime time.Time + for k, v := range tickers { + // for {Crypto}/USDT markets + session.lastPrices[k] = v.Last + if v.Time.After(lastTime) { + lastTime = v.Time + } + } + session.lastPriceUpdatedAt = lastTime + return err +} + +func (session *ExchangeSession) FindPossibleSymbols() (symbols []string, err error) { + // If the session is an isolated margin session, there will be only the isolated margin symbol + if session.Margin && session.IsolatedMargin { + return []string{ + session.IsolatedMarginSymbol, + }, nil } - order.QuantityString = market.FormatQuantity(order.Quantity) - return order, nil + var balances = session.GetAccount().Balances() + var fiatAssets []string + + for _, currency := range types.FiatCurrencies { + if balance, ok := balances[currency]; ok && balance.Total().Sign() > 0 { + fiatAssets = append(fiatAssets, currency) + } + } + + var symbolMap = map[string]struct{}{} + + for _, market := range session.Markets() { + // ignore the markets that are not fiat currency markets + if !util.StringSliceContains(fiatAssets, market.QuoteCurrency) { + continue + } + + // ignore the asset that we don't have in the balance sheet + balance, hasAsset := balances[market.BaseCurrency] + if !hasAsset || balance.Total().IsZero() { + continue + } + + symbolMap[market.Symbol] = struct{}{} + } + + for s := range symbolMap { + symbols = append(symbols, s) + } + + return symbols, nil +} + +// InitExchange initialize the exchange instance and allocate memory for fields +// In this stage, the session var could be loaded from the JSON config, so the pointer fields are still nil +// The Init method will be called after this stage, environment.Init will call the session.Init method later. 
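+// When ex is nil, the exchange instance is created from the session config: public-only access, an explicit key/secret/passphrase, or the EnvVarPrefix environment variables.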
+func (session *ExchangeSession) InitExchange(name string, ex types.Exchange) error { + var err error + var exchangeName = session.ExchangeName + if ex == nil { + if session.PublicOnly { + ex, err = exchange2.NewPublic(exchangeName) + } else { + if session.Key != "" && session.Secret != "" { + ex, err = exchange2.NewStandard(exchangeName, session.Key, session.Secret, session.Passphrase, session.SubAccount) + } else { + ex, err = exchange2.NewWithEnvVarPrefix(exchangeName, session.EnvVarPrefix) + } + } + } + + if err != nil { + return err + } + + // configure exchange + if session.Margin { + marginExchange, ok := ex.(types.MarginExchange) + if !ok { + return fmt.Errorf("exchange %s does not support margin", exchangeName) + } + + if session.IsolatedMargin { + marginExchange.UseIsolatedMargin(session.IsolatedMarginSymbol) + } else { + marginExchange.UseMargin() + } + } + + if session.Futures { + futuresExchange, ok := ex.(types.FuturesExchange) + if !ok { + return fmt.Errorf("exchange %s does not support futures", exchangeName) + } + + if session.IsolatedFutures { + futuresExchange.UseIsolatedFutures(session.IsolatedFuturesSymbol) + } else { + futuresExchange.UseFutures() + } + } + + session.Name = name + session.Notifiability = Notifiability{ + SymbolChannelRouter: NewPatternChannelRouter(nil), + SessionChannelRouter: NewPatternChannelRouter(nil), + ObjectChannelRouter: NewObjectChannelRouter(), + } + session.Exchange = ex + session.UserDataStream = ex.NewStream() + session.MarketDataStream = ex.NewStream() + session.MarketDataStream.SetPublicOnly() + + // pointer fields + session.Subscriptions = make(map[types.Subscription]types.Subscription) + session.Account = &types.Account{} + session.Trades = make(map[string]*types.TradeSlice) + + session.orderBooks = make(map[string]*types.StreamOrderBook) + session.markets = make(map[string]types.Market) + session.lastPrices = make(map[string]fixedpoint.Value) + session.startPrices = make(map[string]fixedpoint.Value) + session.marketDataStores = make(map[string]*MarketDataStore) + session.positions = make(map[string]*types.Position) + session.standardIndicatorSets = make(map[string]*StandardIndicatorSet) + session.orderStores = make(map[string]*OrderStore) + session.OrderExecutor = &ExchangeOrderExecutor{ + // copy the notification system so that we can route + Notifiability: session.Notifiability, + Session: session, + } + + session.usedSymbols = make(map[string]struct{}) + session.initializedSymbols = make(map[string]struct{}) + session.logger = log.WithField("session", name) + return nil +} + +func (session *ExchangeSession) MarginType() string { + margin := "none" + if session.Margin { + margin = "margin" + if session.IsolatedMargin { + margin = "isolated" + } + } + return margin +} + +func (session *ExchangeSession) metricsBalancesUpdater(balances types.BalanceMap) { + for currency, balance := range balances { + labels := prometheus.Labels{ + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "symbol": session.IsolatedMarginSymbol, + "currency": currency, + } + + metricsTotalBalances.With(labels).Set(balance.Total().Float64()) + metricsLockedBalances.With(labels).Set(balance.Locked.Float64()) + metricsAvailableBalances.With(labels).Set(balance.Available.Float64()) + metricsLastUpdateTimeBalance.With(prometheus.Labels{ + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "channel": "user", + "data_type": "balance", + "symbol": "", + "currency": currency, + }).SetToCurrentTime() + } + +} + 
+func (session *ExchangeSession) metricsOrderUpdater(order types.Order) { + metricsLastUpdateTimeBalance.With(prometheus.Labels{ + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "channel": "user", + "data_type": "order", + "symbol": order.Symbol, + "currency": "", + }).SetToCurrentTime() +} + +func (session *ExchangeSession) metricsTradeUpdater(trade types.Trade) { + labels := prometheus.Labels{ + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "side": trade.Side.String(), + "symbol": trade.Symbol, + "liquidity": trade.Liquidity(), + } + metricsTradingVolume.With(labels).Add(trade.Quantity.Mul(trade.Price).Float64()) + metricsTradesTotal.With(labels).Inc() + metricsLastUpdateTimeBalance.With(prometheus.Labels{ + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "channel": "user", + "data_type": "trade", + "symbol": trade.Symbol, + "currency": "", + }).SetToCurrentTime() +} + +func (session *ExchangeSession) bindMarketDataStreamMetrics(stream types.Stream) { + stream.OnBookUpdate(func(book types.SliceOrderBook) { + metricsLastUpdateTimeBalance.With(prometheus.Labels{ + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "channel": "market", + "data_type": "book", + "symbol": book.Symbol, + "currency": "", + }).SetToCurrentTime() + }) + stream.OnKLineClosed(func(kline types.KLine) { + metricsLastUpdateTimeBalance.With(prometheus.Labels{ + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "channel": "market", + "data_type": "kline", + "symbol": kline.Symbol, + "currency": "", + }).SetToCurrentTime() + }) +} + +func (session *ExchangeSession) bindUserDataStreamMetrics(stream types.Stream) { + stream.OnBalanceUpdate(session.metricsBalancesUpdater) + stream.OnBalanceSnapshot(session.metricsBalancesUpdater) + stream.OnTradeUpdate(session.metricsTradeUpdater) + stream.OnOrderUpdate(session.metricsOrderUpdater) + stream.OnDisconnect(func() { + metricsConnectionStatus.With(prometheus.Labels{ + "channel": "user", + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "symbol": session.IsolatedMarginSymbol, + }).Set(0.0) + }) + stream.OnConnect(func() { + metricsConnectionStatus.With(prometheus.Labels{ + "channel": "user", + "exchange": session.ExchangeName.String(), + "margin": session.MarginType(), + "symbol": session.IsolatedMarginSymbol, + }).Set(1.0) + }) +} + +func (session *ExchangeSession) bindConnectionStatusNotification(stream types.Stream, streamName string) { + stream.OnDisconnect(func() { + Notify("session %s %s stream disconnected", session.Name, streamName) + }) + stream.OnConnect(func() { + Notify("session %s %s stream connected", session.Name, streamName) + }) +} + +func (session *ExchangeSession) SlackAttachment() slack.Attachment { + var fields []slack.AttachmentField + var footerIcon = types.ExchangeFooterIcon(session.ExchangeName) + return slack.Attachment{ + // Pretext: "", + // Text: text, + Title: session.Name, + Fields: fields, + FooterIcon: footerIcon, + Footer: util.Render("update time {{ . 
}}", time.Now().Format(time.RFC822)), + } +} + +func (session *ExchangeSession) FormatOrders(orders []types.SubmitOrder) (formattedOrders []types.SubmitOrder, err error) { + for _, order := range orders { + o, err := session.FormatOrder(order) + if err != nil { + return formattedOrders, err + } + formattedOrders = append(formattedOrders, o) + } + + return formattedOrders, err } diff --git a/pkg/bbgo/smart_stops.go b/pkg/bbgo/smart_stops.go new file mode 100644 index 0000000000..17a3ff8a8f --- /dev/null +++ b/pkg/bbgo/smart_stops.go @@ -0,0 +1,287 @@ +package bbgo + +import ( + "context" + "errors" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type TrailingStop struct { + // CallbackRate is the callback rate from the previous high price + CallbackRate fixedpoint.Value `json:"callbackRate,omitempty"` + + // ClosePosition is a percentage of the position to be closed + ClosePosition fixedpoint.Value `json:"closePosition,omitempty"` + + // MinProfit is the percentage of the minimum profit ratio. + // Stop order will be activiated only when the price reaches above this threshold. + MinProfit fixedpoint.Value `json:"minProfit,omitempty"` + + // Interval is the time resolution to update the stop order + // KLine per Interval will be used for updating the stop order + Interval types.Interval `json:"interval,omitempty"` + + // Virtual is used when you don't want to place the real order on the exchange and lock the balance. + // You want to handle the stop order by the strategy itself. + Virtual bool `json:"virtual,omitempty"` +} + +type TrailingStopController struct { + *TrailingStop + + Symbol string + + position *types.Position + latestHigh fixedpoint.Value + averageCost fixedpoint.Value + + // activated: when the price reaches the min profit price, we set the activated to true to enable trailing stop + activated bool +} + +func NewTrailingStopController(symbol string, config *TrailingStop) *TrailingStopController { + return &TrailingStopController{ + TrailingStop: config, + Symbol: symbol, + } +} + +func (c *TrailingStopController) Subscribe(session *ExchangeSession) { + session.Subscribe(types.KLineChannel, c.Symbol, types.SubscribeOptions{ + Interval: c.Interval, + }) +} + +func (c *TrailingStopController) Run(ctx context.Context, session *ExchangeSession, tradeCollector *TradeCollector) { + // store the position + c.position = tradeCollector.Position() + c.averageCost = c.position.AverageCost + + // Use trade collector to get the position update event + tradeCollector.OnPositionUpdate(func(position *types.Position) { + // update average cost if we have it. 
+ c.averageCost = position.AverageCost + }) + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + if kline.Symbol != c.Symbol || kline.Interval != c.Interval { + return + } + + // if average cost is zero, we don't need trailing stop + if c.averageCost.IsZero() || c.position == nil { + return + } + + closePrice := kline.Close + + // if we don't hold position, we just skip dust position + if c.position.Base.Abs().Compare(c.position.Market.MinQuantity) < 0 || c.position.Base.Abs().Mul(closePrice).Compare(c.position.Market.MinNotional) < 0 { + return + } + + if c.MinProfit.Sign() <= 0 { + // when minProfit is not set, we should always activate the trailing stop order + c.activated = true + } else if closePrice.Compare(c.averageCost) > 0 || + changeRate(closePrice, c.averageCost).Compare(c.MinProfit) > 0 { + + if !c.activated { + log.Infof("%s trailing stop activated at price %s", c.Symbol, closePrice.String()) + c.activated = true + } + } else { + return + } + + if !c.activated { + return + } + + // if the trailing stop order is activated, we should update the latest high + // update the latest high + c.latestHigh = fixedpoint.Max(closePrice, c.latestHigh) + + // if it's in the callback rate, we don't want to trigger stop + if closePrice.Compare(c.latestHigh) < 0 && changeRate(closePrice, c.latestHigh).Compare(c.CallbackRate) < 0 { + return + } + + if c.Virtual { + // if the profit rate is defined, and it is less than our minimum profit rate, we skip stop + if c.MinProfit.Sign() > 0 && + closePrice.Compare(c.averageCost) < 0 || + changeRate(closePrice, c.averageCost).Compare(c.MinProfit) < 0 { + return + } + + log.Infof("%s trailing stop emitted, latest high: %s, closed price: %s, average cost: %s, profit spread: %s", + c.Symbol, + c.latestHigh.String(), + closePrice.String(), + c.averageCost.String(), + closePrice.Sub(c.averageCost).String()) + + log.Infof("current %s position: %s", c.Symbol, c.position.String()) + + marketOrder := c.position.NewMarketCloseOrder(c.ClosePosition) + if marketOrder != nil { + log.Infof("submitting %s market order to stop: %+v", c.Symbol, marketOrder) + + // skip dust order + if marketOrder.Quantity.Mul(closePrice).Compare(c.position.Market.MinNotional) < 0 { + log.Warnf("%s market order quote quantity %s < min notional %s, skip placing order", c.Symbol, marketOrder.Quantity.Mul(closePrice).String(), c.position.Market.MinNotional.String()) + return + } + + createdOrders, err := session.Exchange.SubmitOrders(ctx, *marketOrder) + if err != nil { + log.WithError(err).Errorf("stop market order place error") + return + } + tradeCollector.OrderStore().Add(createdOrders...) + tradeCollector.Process() + + // reset the state + c.latestHigh = fixedpoint.Zero + c.activated = false + } + } else { + // place stop order only when the closed price is greater than the current average cost + if c.MinProfit.Sign() > 0 && closePrice.Compare(c.averageCost) > 0 && + changeRate(closePrice, c.averageCost).Compare(c.MinProfit) >= 0 { + + stopPrice := c.averageCost.Mul(fixedpoint.One.Add(c.MinProfit)) + orderForm := c.GenerateStopOrder(stopPrice, c.averageCost) + if orderForm != nil { + log.Infof("updating %s stop limit order to simulate trailing stop order...", c.Symbol) + + createdOrders, err := session.Exchange.SubmitOrders(ctx, *orderForm) + if err != nil { + log.WithError(err).Errorf("%s stop order place error", c.Symbol) + return + } + + tradeCollector.OrderStore().Add(createdOrders...) 
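+					// process the collector right away so any trades matching the new stop order update the position immediately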
+ tradeCollector.Process() + } + } + } + }) +} + +func (c *TrailingStopController) GenerateStopOrder(stopPrice, price fixedpoint.Value) *types.SubmitOrder { + base := c.position.GetBase() + if base.IsZero() { + return nil + } + + quantity := base.Abs() + quoteQuantity := price.Mul(quantity) + + if c.ClosePosition.Sign() > 0 { + quantity = quantity.Mul(c.ClosePosition) + } + + // skip dust orders + if quantity.Compare(c.position.Market.MinQuantity) < 0 || + quoteQuantity.Compare(c.position.Market.MinNotional) < 0 { + return nil + } + + side := types.SideTypeSell + if base.Sign() < 0 { + side = types.SideTypeBuy + } + + return &types.SubmitOrder{ + Symbol: c.Symbol, + Market: c.position.Market, + Type: types.OrderTypeStopLimit, + Side: side, + StopPrice: stopPrice, + Price: price, + Quantity: quantity, + } +} + +type FixedStop struct{} + +type Stop struct { + TrailingStop *TrailingStop `json:"trailingStop,omitempty"` + FixedStop *FixedStop `json:"fixedStop,omitempty"` +} + +// SmartStops shares the stop order logics between different strategies +// +// See also: +// - Stop-Loss order: https://www.investopedia.com/terms/s/stop-lossorder.asp +// - Trailing Stop-loss order: https://www.investopedia.com/articles/trading/08/trailing-stop-loss.asp +// +// How to integrate this into your strategy? +// +// To use the stop controllers, you can embed this struct into your Strategy struct +// +// func (s *Strategy) Initialize() error { +// return s.SmartStops.InitializeStopControllers(s.Symbol) +// } +// func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { +// s.SmartStops.Subscribe(session) +// } +// +// func (s *Strategy) Run() { +// s.SmartStops.RunStopControllers(ctx, session, s.tradeCollector) +// } +// +type SmartStops struct { + // Stops is the slice of the stop order config + Stops []Stop `json:"stops,omitempty"` + + // StopControllers are constructed from the stop config + StopControllers []StopController `json:"-"` +} + +type StopController interface { + Subscribe(session *ExchangeSession) + Run(ctx context.Context, session *ExchangeSession, tradeCollector *TradeCollector) +} + +func (s *SmartStops) newStopController(symbol string, config Stop) (StopController, error) { + if config.TrailingStop != nil { + return NewTrailingStopController(symbol, config.TrailingStop), nil + } + + return nil, errors.New("incorrect stop controller setup") +} + +func (s *SmartStops) InitializeStopControllers(symbol string) error { + for _, stop := range s.Stops { + controller, err := s.newStopController(symbol, stop) + if err != nil { + return err + } + + s.StopControllers = append(s.StopControllers, controller) + } + return nil +} + +func (s *SmartStops) Subscribe(session *ExchangeSession) { + for _, stopController := range s.StopControllers { + stopController.Subscribe(session) + } +} + +func (s *SmartStops) RunStopControllers(ctx context.Context, session *ExchangeSession, tradeCollector *TradeCollector) { + for _, stopController := range s.StopControllers { + stopController.Run(ctx, session, tradeCollector) + } +} + +func changeRate(a, b fixedpoint.Value) fixedpoint.Value { + return a.Sub(b).Div(b).Abs() +} diff --git a/pkg/bbgo/strategy_controller.go b/pkg/bbgo/strategy_controller.go new file mode 100644 index 0000000000..e9e8662583 --- /dev/null +++ b/pkg/bbgo/strategy_controller.go @@ -0,0 +1,57 @@ +package bbgo + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +//go:generate callbackgen -type StrategyController -interface +type StrategyController struct { + Status types.StrategyStatus + + // 
Callbacks + suspendCallbacks []func() + resumeCallbacks []func() + emergencyStopCallbacks []func() +} + +func (s *StrategyController) GetStatus() types.StrategyStatus { + return s.Status +} + +func (s *StrategyController) Suspend() error { + s.Status = types.StrategyStatusStopped + + s.EmitSuspend() + + return nil +} + +func (s *StrategyController) Resume() error { + s.Status = types.StrategyStatusRunning + + s.EmitResume() + + return nil +} + +func (s *StrategyController) EmergencyStop() error { + s.Status = types.StrategyStatusStopped + + s.EmitEmergencyStop() + + return nil +} + +type StrategyStatusReader interface { + GetStatus() types.StrategyStatus +} + +type StrategyToggler interface { + StrategyStatusReader + Suspend() error + Resume() error +} + +type EmergencyStopper interface { + EmergencyStop() error +} diff --git a/pkg/bbgo/strategycontroller_callbacks.go b/pkg/bbgo/strategycontroller_callbacks.go new file mode 100644 index 0000000000..dd432fcb57 --- /dev/null +++ b/pkg/bbgo/strategycontroller_callbacks.go @@ -0,0 +1,43 @@ +// Code generated by "callbackgen -type StrategyController -interface"; DO NOT EDIT. + +package bbgo + +import () + +func (s *StrategyController) OnSuspend(cb func()) { + s.suspendCallbacks = append(s.suspendCallbacks, cb) +} + +func (s *StrategyController) EmitSuspend() { + for _, cb := range s.suspendCallbacks { + cb() + } +} + +func (s *StrategyController) OnResume(cb func()) { + s.resumeCallbacks = append(s.resumeCallbacks, cb) +} + +func (s *StrategyController) EmitResume() { + for _, cb := range s.resumeCallbacks { + cb() + } +} + +func (s *StrategyController) OnEmergencyStop(cb func()) { + s.emergencyStopCallbacks = append(s.emergencyStopCallbacks, cb) +} + +func (s *StrategyController) EmitEmergencyStop() { + for _, cb := range s.emergencyStopCallbacks { + cb() + } +} + +type StrategyControllerEventHub interface { + OnSuspend(cb func()) + + OnResume(cb func()) + + OnEmergencyStop(cb func()) +} diff --git a/pkg/bbgo/string.go b/pkg/bbgo/string.go index 920078f66e..f30d11b655 100644 --- a/pkg/bbgo/string.go +++ b/pkg/bbgo/string.go @@ -1,2 +1 @@ package bbgo - diff --git a/pkg/bbgo/testdata/backtest.yaml b/pkg/bbgo/testdata/backtest.yaml index 344169b0cb..7c96b81d25 100644 --- a/pkg/bbgo/testdata/backtest.yaml +++ b/pkg/bbgo/testdata/backtest.yaml @@ -14,13 +14,12 @@ backtest: # https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp startTime: "2020-01-01" account: - makerCommission: 15 - takerCommission: 15 - buyerCommission: 0 - sellerCommission: 0 - balances: - BTC: 1.0 - USDT: 5000.0 + binance: + makerFeeRate: 15 + takerFeeRate: 15 + balances: + BTC: 1.0 + USDT: 5000.0 exchangeStrategies: diff --git a/pkg/bbgo/testdata/persistence.yaml b/pkg/bbgo/testdata/persistence.yaml index 457303b33b..c0dff8214f 100644 --- a/pkg/bbgo/testdata/persistence.yaml +++ b/pkg/bbgo/testdata/persistence.yaml @@ -19,7 +19,8 @@ persistence: database: "persistence" strategies: - swing: +- on: max + test: symbolPosition: persistence: type: json diff --git a/pkg/bbgo/testdata/strategy.yaml b/pkg/bbgo/testdata/strategy.yaml index b75692b4f3..e04e630e1a 100644 --- a/pkg/bbgo/testdata/strategy.yaml +++ b/pkg/bbgo/testdata/strategy.yaml @@ -1,29 +1,21 @@ --- -imports: -- github.com/c9s/bbgo/pkg/strategy/buyandhold - sessions: max: exchange: max - envVarPrefix: max - + envVarPrefix: MAX + takerFeeRate: 0 + makerFeeRate: 0 binance: exchange: binance - envVarPrefix: binance + envVarPrefix: BINANCE + takerFeeRate: 0 + makerFeeRate: 0 exchangeStrategies: -- on: binance +- 
on: ["binance"] test: symbol: "BTCUSDT" interval: "1m" baseQuantity: 0.1 minDropPercentage: -0.05 - -reportPnL: -- averageCostBySymbols: - - "BTCUSDT" - - "BNBUSDT" - of: binance - when: - - "@daily" - - "@hourly" + maxAssetQuantity: 1.1 diff --git a/pkg/bbgo/time.go b/pkg/bbgo/time.go new file mode 100644 index 0000000000..2215858034 --- /dev/null +++ b/pkg/bbgo/time.go @@ -0,0 +1,15 @@ +package bbgo + +import ( + "time" +) + +var LocalTimeZone *time.Location + +func init() { + var err error + LocalTimeZone, err = time.LoadLocation("Local") + if err != nil { + panic(err) + } +} diff --git a/pkg/bbgo/trade_store.go b/pkg/bbgo/trade_store.go new file mode 100644 index 0000000000..eab6cca31c --- /dev/null +++ b/pkg/bbgo/trade_store.go @@ -0,0 +1,92 @@ +package bbgo + +import ( + "sync" + + "github.com/c9s/bbgo/pkg/types" +) + +type TradeStore struct { + // any created trades for tracking trades + sync.Mutex + + trades map[uint64]types.Trade + + Symbol string + RemoveCancelled bool + RemoveFilled bool + AddOrderUpdate bool +} + +func NewTradeStore(symbol string) *TradeStore { + return &TradeStore{ + Symbol: symbol, + trades: make(map[uint64]types.Trade), + } +} + +func (s *TradeStore) Num() (num int) { + s.Lock() + num = len(s.trades) + s.Unlock() + return num +} + +func (s *TradeStore) Trades() (trades []types.Trade) { + s.Lock() + defer s.Unlock() + + for _, o := range s.trades { + trades = append(trades, o) + } + + return trades +} + +func (s *TradeStore) Exists(oID uint64) (ok bool) { + s.Lock() + defer s.Unlock() + + _, ok = s.trades[oID] + return ok +} + +func (s *TradeStore) Clear() { + s.Lock() + s.trades = make(map[uint64]types.Trade) + s.Unlock() +} + +type TradeFilter func(trade types.Trade) bool + +func (s *TradeStore) Filter(filter TradeFilter) { + s.Lock() + var trades = make(map[uint64]types.Trade) + for _, trade := range s.trades { + if !filter(trade) { + trades[trade.ID] = trade + } + } + s.trades = trades + s.Unlock() +} + +func (s *TradeStore) GetAndClear() (trades []types.Trade) { + s.Lock() + for _, o := range s.trades { + trades = append(trades, o) + } + s.trades = make(map[uint64]types.Trade) + s.Unlock() + + return trades +} + +func (s *TradeStore) Add(trades ...types.Trade) { + s.Lock() + defer s.Unlock() + + for _, trade := range trades { + s.trades[trade.ID] = trade + } +} diff --git a/pkg/bbgo/tradecollector.go b/pkg/bbgo/tradecollector.go new file mode 100644 index 0000000000..0c23798f3d --- /dev/null +++ b/pkg/bbgo/tradecollector.go @@ -0,0 +1,204 @@ +package bbgo + +import ( + "context" + "time" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/sigchan" + "github.com/c9s/bbgo/pkg/types" +) + +//go:generate callbackgen -type TradeCollector +type TradeCollector struct { + Symbol string + orderSig sigchan.Chan + + tradeStore *TradeStore + tradeC chan types.Trade + position *types.Position + orderStore *OrderStore + doneTrades map[types.TradeKey]struct{} + + recoverCallbacks []func(trade types.Trade) + + tradeCallbacks []func(trade types.Trade, profit, netProfit fixedpoint.Value) + + positionUpdateCallbacks []func(position *types.Position) + profitCallbacks []func(trade types.Trade, profit *types.Profit) +} + +func NewTradeCollector(symbol string, position *types.Position, orderStore *OrderStore) *TradeCollector { + return &TradeCollector{ + Symbol: symbol, + orderSig: sigchan.New(1), + + tradeC: make(chan types.Trade, 100), + tradeStore: NewTradeStore(symbol), + doneTrades: make(map[types.TradeKey]struct{}), + 
position: position, + orderStore: orderStore, + } +} + +// OrderStore returns the order store used by the trade collector +func (c *TradeCollector) OrderStore() *OrderStore { + return c.orderStore +} + +// Position returns the position used by the trade collector +func (c *TradeCollector) Position() *types.Position { + return c.position +} + +func (c *TradeCollector) SetPosition(position *types.Position) { + c.position = position +} + +// QueueTrade sends the trade object to the trade channel, +// so that the goroutine can receive the trade and process in the background. +func (c *TradeCollector) QueueTrade(trade types.Trade) { + c.tradeC <- trade +} + +// BindStreamForBackground bind the stream callback for background processing +func (c *TradeCollector) BindStreamForBackground(stream types.Stream) { + stream.OnTradeUpdate(c.QueueTrade) +} + +func (c *TradeCollector) BindStream(stream types.Stream) { + stream.OnTradeUpdate(func(trade types.Trade) { + c.ProcessTrade(trade) + }) +} + +// Emit triggers the trade processing (position update) +// If you sent order, and the order store is updated, you can call this method +// so that trades will be processed in the next round of the goroutine loop +func (c *TradeCollector) Emit() { + c.orderSig.Emit() +} + +func (c *TradeCollector) Recover(ctx context.Context, ex types.ExchangeTradeHistoryService, symbol string, from time.Time) error { + trades, err := ex.QueryTrades(ctx, symbol, &types.TradeQueryOptions{ + StartTime: &from, + }) + + if err != nil { + return err + } + + for _, td := range trades { + log.Debugf("processing trade: %s", td.String()) + if c.ProcessTrade(td) { + log.Infof("recovered trade: %s", td.String()) + c.EmitRecover(td) + } + } + return nil +} + +// Process filters the received trades and see if there are orders matching the trades +// if we have the order in the order store, then the trade will be considered for the position. +// profit will also be calculated. 
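+// It returns true when at least one trade was matched and the position has been updated.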
+func (c *TradeCollector) Process() bool { + positionChanged := false + c.tradeStore.Filter(func(trade types.Trade) bool { + key := trade.Key() + + // if it's already done, remove the trade from the trade store + if _, done := c.doneTrades[key]; done { + return true + } + + if c.orderStore.Exists(trade.OrderID) { + c.doneTrades[key] = struct{}{} + profit, netProfit, madeProfit := c.position.AddTrade(trade) + if madeProfit { + p := c.position.NewProfit(trade, profit, netProfit) + c.EmitTrade(trade, profit, netProfit) + c.EmitProfit(trade, &p) + } else { + c.EmitTrade(trade, fixedpoint.Zero, fixedpoint.Zero) + c.EmitProfit(trade, nil) + } + positionChanged = true + return true + } + return false + }) + if positionChanged { + c.EmitPositionUpdate(c.position) + } + + return positionChanged +} + +// processTrade takes a trade and see if there is a matched order +// if the order is found, then we add the trade to the position +// return true when the given trade is added +// return false when the given trade is not added +func (c *TradeCollector) processTrade(trade types.Trade) bool { + if c.orderStore.Exists(trade.OrderID) { + key := trade.Key() + + // if it's already done, remove the trade from the trade store + if _, done := c.doneTrades[key]; done { + return false + } + + profit, netProfit, madeProfit := c.position.AddTrade(trade) + if madeProfit { + p := c.position.NewProfit(trade, profit, netProfit) + c.EmitTrade(trade, profit, netProfit) + c.EmitProfit(trade, &p) + } else { + c.EmitTrade(trade, fixedpoint.Zero, fixedpoint.Zero) + c.EmitProfit(trade, nil) + } + c.EmitPositionUpdate(c.position) + c.doneTrades[key] = struct{}{} + return true + } + return false +} + +// return true when the given trade is added +// return false when the given trade is not added +func (c *TradeCollector) ProcessTrade(trade types.Trade) bool { + key := trade.Key() + // if it's already done, remove the trade from the trade store + if _, done := c.doneTrades[key]; done { + return false + } + + if c.processTrade(trade) { + return true + } + + c.tradeStore.Add(trade) + return false +} + +// Run is a goroutine executed in the background +// Do not use this function if you need back-testing +func (c *TradeCollector) Run(ctx context.Context) { + var ticker = time.NewTicker(3 * time.Second) + for { + select { + case <-ctx.Done(): + return + + case <-ticker.C: + c.Process() + + case <-c.orderSig: + c.Process() + + case trade := <-c.tradeC: + c.ProcessTrade(trade) + } + } +} diff --git a/pkg/bbgo/tradecollector_callbacks.go b/pkg/bbgo/tradecollector_callbacks.go new file mode 100644 index 0000000000..44756224f9 --- /dev/null +++ b/pkg/bbgo/tradecollector_callbacks.go @@ -0,0 +1,48 @@ +// Code generated by "callbackgen -type TradeCollector"; DO NOT EDIT. 
+ +package bbgo + +import ( + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func (c *TradeCollector) OnRecover(cb func(trade types.Trade)) { + c.recoverCallbacks = append(c.recoverCallbacks, cb) +} + +func (c *TradeCollector) EmitRecover(trade types.Trade) { + for _, cb := range c.recoverCallbacks { + cb(trade) + } +} + +func (c *TradeCollector) OnTrade(cb func(trade types.Trade, profit fixedpoint.Value, netProfit fixedpoint.Value)) { + c.tradeCallbacks = append(c.tradeCallbacks, cb) +} + +func (c *TradeCollector) EmitTrade(trade types.Trade, profit fixedpoint.Value, netProfit fixedpoint.Value) { + for _, cb := range c.tradeCallbacks { + cb(trade, profit, netProfit) + } +} + +func (c *TradeCollector) OnPositionUpdate(cb func(position *types.Position)) { + c.positionUpdateCallbacks = append(c.positionUpdateCallbacks, cb) +} + +func (c *TradeCollector) EmitPositionUpdate(position *types.Position) { + for _, cb := range c.positionUpdateCallbacks { + cb(position) + } +} + +func (c *TradeCollector) OnProfit(cb func(trade types.Trade, profit *types.Profit)) { + c.profitCallbacks = append(c.profitCallbacks, cb) +} + +func (c *TradeCollector) EmitProfit(trade types.Trade, profit *types.Profit) { + for _, cb := range c.profitCallbacks { + cb(trade, profit) + } +} diff --git a/pkg/bbgo/tradecollector_test.go b/pkg/bbgo/tradecollector_test.go new file mode 100644 index 0000000000..149eb49974 --- /dev/null +++ b/pkg/bbgo/tradecollector_test.go @@ -0,0 +1,65 @@ +package bbgo + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func TestTradeCollector_ShouldNotCountDuplicatedTrade(t *testing.T) { + symbol := "BTCUSDT" + position := types.NewPosition(symbol, "BTC", "USDT") + orderStore := NewOrderStore(symbol) + collector := NewTradeCollector(symbol, position, orderStore) + assert.NotNil(t, collector) + + matched := collector.ProcessTrade(types.Trade{ + ID: 1, + OrderID: 399, + Exchange: types.ExchangeBinance, + Price: fixedpoint.NewFromInt(40000), + Quantity: fixedpoint.One, + QuoteQuantity: fixedpoint.NewFromInt(40000), + Symbol: "BTCUSDT", + Side: types.SideTypeBuy, + IsBuyer: true, + }) + assert.False(t, matched, "should be added to the trade store") + assert.Equal(t, 1, len(collector.tradeStore.Trades()), "should have one trade in the trade store") + + orderStore.Add(types.Order{ + SubmitOrder: types.SubmitOrder{ + Symbol: "BTCUSDT", + Side: types.SideTypeBuy, + Type: types.OrderTypeLimit, + Quantity: fixedpoint.One, + Price: fixedpoint.NewFromInt(40000), + }, + Exchange: types.ExchangeBinance, + OrderID: 399, + Status: types.OrderStatusFilled, + ExecutedQuantity: fixedpoint.One, + IsWorking: false, + }) + + matched = collector.Process() + assert.True(t, matched) + assert.Equal(t, 0, len(collector.tradeStore.Trades()), "the found trade should be removed from the trade store") + + matched = collector.ProcessTrade(types.Trade{ + ID: 1, + OrderID: 399, + Exchange: types.ExchangeBinance, + Price: fixedpoint.NewFromInt(40000), + Quantity: fixedpoint.One, + QuoteQuantity: fixedpoint.NewFromInt(40000), + Symbol: "BTCUSDT", + Side: types.SideTypeBuy, + IsBuyer: true, + }) + assert.False(t, matched, "the same trade should not match") + assert.Equal(t, 0, len(collector.tradeStore.Trades()), "the same trade should not be added to the trade store") +} diff --git a/pkg/bbgo/trader.go b/pkg/bbgo/trader.go index 79e97b1383..4fc35e08e1 100644 --- a/pkg/bbgo/trader.go +++ b/pkg/bbgo/trader.go 
@@ -4,22 +4,31 @@ import ( "context" "fmt" "reflect" - "sync" + "github.com/pkg/errors" log "github.com/sirupsen/logrus" - "github.com/c9s/bbgo/pkg/types" - _ "github.com/go-sql-driver/mysql" + + "github.com/c9s/bbgo/pkg/interact" ) -var SupportedExchanges = []types.ExchangeName{"binance", "max"} +type StrategyID interface { + ID() string +} // SingleExchangeStrategy represents the single Exchange strategy type SingleExchangeStrategy interface { + StrategyID Run(ctx context.Context, orderExecutor OrderExecutor, session *ExchangeSession) error } +type StrategyInitializer interface { + Initialize() error +} + +// ExchangeSessionSubscriber provides an interface for collecting subscriptions from different strategies +// Subscribe method will be called before the user data stream connection is created. type ExchangeSessionSubscriber interface { Subscribe(session *ExchangeSession) } @@ -29,21 +38,12 @@ type CrossExchangeSessionSubscriber interface { } type CrossExchangeStrategy interface { + StrategyID CrossRun(ctx context.Context, orderExecutionRouter OrderExecutionRouter, sessions map[string]*ExchangeSession) error } -//go:generate callbackgen -type Graceful -type Graceful struct { - shutdownCallbacks []func(ctx context.Context, wg *sync.WaitGroup) -} - -func (g *Graceful) Shutdown(ctx context.Context) { - var wg sync.WaitGroup - wg.Add(len(g.shutdownCallbacks)) - - go g.EmitShutdown(ctx, &wg) - - wg.Wait() +type Validator interface { + Validate() error } type Logging interface { @@ -59,9 +59,9 @@ type Logger interface { type SilentLogger struct{} -func (logger *SilentLogger) Infof(message string, args ...interface{}) {} -func (logger *SilentLogger) Warnf(message string, args ...interface{}) {} -func (logger *SilentLogger) Errorf(message string, args ...interface{}) {} +func (logger *SilentLogger) Infof(string, ...interface{}) {} +func (logger *SilentLogger) Warnf(string, ...interface{}) {} +func (logger *SilentLogger) Errorf(string, ...interface{}) {} type Trader struct { environment *Environment @@ -92,6 +92,28 @@ func (trader *Trader) DisableLogging() { trader.logger = &SilentLogger{} } +func (trader *Trader) Configure(userConfig *Config) error { + if userConfig.RiskControls != nil { + trader.SetRiskControls(userConfig.RiskControls) + } + + for _, entry := range userConfig.ExchangeStrategies { + for _, mount := range entry.Mounts { + log.Infof("attaching strategy %T on %s...", entry.Strategy, mount) + if err := trader.AttachStrategyOn(mount, entry.Strategy); err != nil { + return err + } + } + } + + for _, strategy := range userConfig.CrossExchangeStrategies { + log.Infof("attaching cross exchange strategy %T", strategy) + trader.AttachCrossExchangeStrategy(strategy) + } + + return nil +} + // AttachStrategyOn attaches the single exchange strategy on an exchange Session. // Single exchange strategy is the default behavior. func (trader *Trader) AttachStrategyOn(session string, strategies ...SingleExchangeStrategy) error { @@ -108,9 +130,8 @@ func (trader *Trader) AttachStrategyOn(session string, strategies ...SingleExcha return fmt.Errorf("session %s is not defined, valid sessions are: %v", session, keys) } - for _, s := range strategies { - trader.exchangeStrategies[session] = append(trader.exchangeStrategies[session], s) - } + trader.exchangeStrategies[session] = append( + trader.exchangeStrategies[session], strategies...) 
return nil } @@ -122,194 +143,287 @@ func (trader *Trader) AttachCrossExchangeStrategy(strategy CrossExchangeStrategy return trader } +// SetRiskControls sets the risk controller // TODO: provide a more DSL way to configure risk controls func (trader *Trader) SetRiskControls(riskControls *RiskControls) { trader.riskControls = riskControls } -func (trader *Trader) Run(ctx context.Context) error { +func (trader *Trader) Subscribe() { // pre-subscribe the data for sessionName, strategies := range trader.exchangeStrategies { session := trader.environment.sessions[sessionName] for _, strategy := range strategies { + if initializer, ok := strategy.(StrategyInitializer); ok { + if err := initializer.Initialize(); err != nil { + panic(err) + } + } + if subscriber, ok := strategy.(ExchangeSessionSubscriber); ok { subscriber.Subscribe(session) + } else { + log.Errorf("strategy %s does not implement ExchangeSessionSubscriber", strategy.ID()) } } } for _, strategy := range trader.crossExchangeStrategies { + if initializer, ok := strategy.(StrategyInitializer); ok { + if err := initializer.Initialize(); err != nil { + panic(err) + } + } + if subscriber, ok := strategy.(CrossExchangeSessionSubscriber); ok { subscriber.CrossSubscribe(trader.environment.sessions) + } else { + log.Errorf("strategy %s does not implement CrossExchangeSessionSubscriber", strategy.ID()) } } +} + +func (trader *Trader) RunSingleExchangeStrategy(ctx context.Context, strategy SingleExchangeStrategy, session *ExchangeSession, orderExecutor OrderExecutor) error { + rs := reflect.ValueOf(strategy) + + // get the struct element + rs = rs.Elem() + + if rs.Kind() != reflect.Struct { + return errors.New("strategy object is not a struct") + } - if err := trader.environment.Init(ctx); err != nil { + if err := trader.injectCommonServices(strategy); err != nil { return err } - // load and run Session strategies - for sessionName, strategies := range trader.exchangeStrategies { - var session = trader.environment.sessions[sessionName] + if err := injectField(rs, "OrderExecutor", orderExecutor, false); err != nil { + return errors.Wrapf(err, "failed to inject OrderExecutor on %T", strategy) + } + + if symbol, ok := isSymbolBasedStrategy(rs); ok { + log.Infof("found symbol based strategy from %s", rs.Type()) - var baseOrderExecutor = &ExchangeOrderExecutor{ - // copy the environment notification system so that we can route - Notifiability: trader.environment.Notifiability, - Session: session, + market, ok := session.Market(symbol) + if !ok { + return fmt.Errorf("market of symbol %s not found", symbol) } - // forward trade updates and order updates to the order executor - session.Stream.OnTradeUpdate(baseOrderExecutor.EmitTradeUpdate) - session.Stream.OnOrderUpdate(baseOrderExecutor.EmitOrderUpdate) + indicatorSet, ok := session.StandardIndicatorSet(symbol) + if !ok { + return fmt.Errorf("standardIndicatorSet of symbol %s not found", symbol) + } - // default to base order executor - var orderExecutor OrderExecutor = baseOrderExecutor + store, ok := session.MarketDataStore(symbol) + if !ok { + return fmt.Errorf("marketDataStore of symbol %s not found", symbol) + } - // Since the risk controls are loaded from the config file - if riskControls := trader.riskControls; riskControls != nil { - if trader.riskControls.SessionBasedRiskControl != nil { - control, ok := trader.riskControls.SessionBasedRiskControl[sessionName] - if ok { - control.SetBaseOrderExecutor(baseOrderExecutor) + if err := parseStructAndInject(strategy, + market, + indicatorSet, + 
store, + session, + session.OrderExecutor, + ); err != nil { + return errors.Wrapf(err, "failed to inject object into %T", strategy) + } + } - // pick the order executor - if control.OrderExecutor != nil { - orderExecutor = control.OrderExecutor - } - } - } + // If the strategy has Validate() method, run it and check the error + if v, ok := strategy.(Validator); ok { + if err := v.Validate(); err != nil { + return fmt.Errorf("failed to validate the config: %w", err) } + } - for _, strategy := range strategies { - rs := reflect.ValueOf(strategy) - if rs.Elem().Kind() == reflect.Struct { - // get the struct element - rs = rs.Elem() + return strategy.Run(ctx, orderExecutor, session) +} - if err := injectField(rs, "Graceful", &trader.Graceful, true); err != nil { - log.WithError(err).Errorf("strategy Graceful injection failed") - return err - } +func (trader *Trader) getSessionOrderExecutor(sessionName string) OrderExecutor { + var session = trader.environment.sessions[sessionName] - if err := injectField(rs, "Logger", &trader.logger, false); err != nil { - log.WithError(err).Errorf("strategy Logger injection failed") - return err - } - - if err := injectField(rs, "Notifiability", &trader.environment.Notifiability, false); err != nil { - log.WithError(err).Errorf("strategy Notifiability injection failed") - return err - } + // default to base order executor + var orderExecutor OrderExecutor = session.OrderExecutor - if err := injectField(rs, "OrderExecutor", orderExecutor, false); err != nil { - log.WithError(err).Errorf("strategy OrderExecutor injection failed") - return err - } + // Since the risk controls are loaded from the config file + if trader.riskControls != nil && trader.riskControls.SessionBasedRiskControl != nil { + if control, ok := trader.riskControls.SessionBasedRiskControl[sessionName]; ok { + control.SetBaseOrderExecutor(session.OrderExecutor) - if symbol, ok := isSymbolBasedStrategy(rs); ok { - log.Infof("found symbol based strategy from %s", rs.Type()) - if _, ok := hasField(rs, "Market"); ok { - if market, ok := session.Market(symbol); ok { - // let's make the market object passed by pointer - if err := injectField(rs, "Market", &market, false); err != nil { - log.WithError(err).Errorf("strategy %T Market injection failed", strategy) - return err - } - } - } - - // StandardIndicatorSet - if _, ok := hasField(rs, "StandardIndicatorSet"); ok { - if indicatorSet, ok := session.StandardIndicatorSet(symbol); ok { - if err := injectField(rs, "StandardIndicatorSet", indicatorSet, true); err != nil { - log.WithError(err).Errorf("strategy %T StandardIndicatorSet injection failed", strategy) - return err - } - } - } - - if _, ok := hasField(rs, "MarketDataStore"); ok { - if store, ok := session.MarketDataStore(symbol); ok { - if err := injectField(rs, "MarketDataStore", store, true); err != nil { - log.WithError(err).Errorf("strategy %T MarketDataStore injection failed", strategy) - return err - } - } - } - } + // pick the wrapped order executor + if control.OrderExecutor != nil { + return control.OrderExecutor } + } + } + + return orderExecutor +} - err := strategy.Run(ctx, orderExecutor, session) - if err != nil { +func (trader *Trader) RunAllSingleExchangeStrategy(ctx context.Context) error { + // load and run Session strategies + for sessionName, strategies := range trader.exchangeStrategies { + var session = trader.environment.sessions[sessionName] + var orderExecutor = trader.getSessionOrderExecutor(sessionName) + for _, strategy := range strategies { + if err := 
trader.RunSingleExchangeStrategy(ctx, strategy, session, orderExecutor); err != nil { return err } } } + return nil +} + +func (trader *Trader) Run(ctx context.Context) error { + // before we start the interaction, + // register the core interaction, because we can only get the strategies in this scope + // trader.environment.Connect will call interact.Start + interact.AddCustomInteraction(NewCoreInteraction(trader.environment, trader)) + + trader.Subscribe() + + if err := trader.environment.Start(ctx); err != nil { + return err + } + + if err := trader.RunAllSingleExchangeStrategy(ctx); err != nil { + return err + } + router := &ExchangeOrderExecutionRouter{ - Notifiability: trader.environment.Notifiability, - sessions: trader.environment.sessions, + sessions: trader.environment.sessions, + executors: make(map[string]OrderExecutor), + } + for sessionID := range trader.environment.sessions { + var orderExecutor = trader.getSessionOrderExecutor(sessionID) + router.executors[sessionID] = orderExecutor } for _, strategy := range trader.crossExchangeStrategies { rs := reflect.ValueOf(strategy) - if rs.Elem().Kind() == reflect.Struct { - // get the struct element - rs = rs.Elem() - - if field, ok := hasField(rs, "Persistence"); ok { - if trader.environment.PersistenceServiceFacade == nil { - log.Warnf("strategy has Persistence field but persistence service is not defined") - } else { - log.Infof("found Persistence field, injecting...") - if field.IsNil() { - field.Set(reflect.ValueOf(&Persistence{ - PersistenceSelector: &PersistenceSelector{ - StoreID: "default", - Type: "memory", - }, - Facade: trader.environment.PersistenceServiceFacade, - })) - } else { - elem := field.Elem() - if elem.Kind() != reflect.Struct { - return fmt.Errorf("the field Persistence is not a struct element") - } - - if err := injectField(elem, "Facade", trader.environment.PersistenceServiceFacade, true); err != nil { - log.WithError(err).Errorf("strategy Persistence injection failed") - return err - } - } - } - } - if err := injectField(rs, "Graceful", &trader.Graceful, true); err != nil { - log.WithError(err).Errorf("strategy Graceful injection failed") - return err - } + // get the struct element from the struct pointer + rs = rs.Elem() + if rs.Kind() != reflect.Struct { + continue + } - if err := injectField(rs, "Logger", &trader.logger, false); err != nil { - log.WithError(err).Errorf("strategy Logger injection failed") - return err - } + if err := trader.injectCommonServices(strategy); err != nil { + return err + } + + if err := strategy.CrossRun(ctx, router, trader.environment.sessions); err != nil { + return err + } + } + + return trader.environment.Connect(ctx) +} + +func (trader *Trader) LoadState() error { + if trader.environment.BacktestService != nil { + return nil + } + + if PersistenceServiceFacade == nil { + return nil + } + + ps := PersistenceServiceFacade.Get() - if err := injectField(rs, "Notifiability", &trader.environment.Notifiability, false); err != nil { - log.WithError(err).Errorf("strategy Notifiability injection failed") + log.Infof("loading strategies states...") + + return trader.IterateStrategies(func(strategy StrategyID) error { + id := callID(strategy) + return loadPersistenceFields(strategy, id, ps) + }) +} + +func (trader *Trader) IterateStrategies(f func(st StrategyID) error) error { + for _, strategies := range trader.exchangeStrategies { + for _, strategy := range strategies { + if err := f(strategy); err != nil { return err } - } + } - if err := strategy.CrossRun(ctx, router, 
trader.environment.sessions); err != nil { + for _, strategy := range trader.crossExchangeStrategies { + if err := f(strategy); err != nil { return err } } - return trader.environment.Connect(ctx) + return nil } -// ReportPnL configure and set the PnLReporter with the given notifier -func (trader *Trader) ReportPnL() *PnLReporterManager { - return NewPnLReporter(&trader.environment.Notifiability) +func (trader *Trader) SaveState() error { + if trader.environment.BacktestService != nil { + return nil + } + + if PersistenceServiceFacade == nil { + return nil + } + + ps := PersistenceServiceFacade.Get() + + log.Infof("saving strategies states...") + return trader.IterateStrategies(func(strategy StrategyID) error { + id := callID(strategy) + if len(id) == 0 { + return nil + } + + return storePersistenceFields(strategy, id, ps) + }) +} + +var defaultPersistenceSelector = &PersistenceSelector{ + StoreID: "default", + Type: "memory", +} + +func (trader *Trader) injectCommonServices(s interface{}) error { + persistence := &Persistence{ + PersistenceSelector: defaultPersistenceSelector, + } + + // a special injection for persistence selector: + // if user defined the selector, the facade pointer will be nil, hence we need to update the persistence facade pointer + sv := reflect.ValueOf(s).Elem() + if field, ok := hasField(sv, "Persistence"); ok { + // the selector is set, but we need to update the facade pointer + if !field.IsNil() { + elem := field.Elem() + if elem.Kind() != reflect.Struct { + return fmt.Errorf("field Persistence is not a struct element, %s given", field) + } + + if err := injectField(elem, "Facade", PersistenceServiceFacade, true); err != nil { + return err + } + + /* + if err := parseStructAndInject(field.Interface(), persistenceFacade); err != nil { + return err + } + */ + } + } + + return parseStructAndInject(s, + &trader.Graceful, + &trader.logger, + Notification, + trader.environment.TradeService, + trader.environment.OrderService, + trader.environment.DatabaseService, + trader.environment.AccountService, + trader.environment, + persistence, + PersistenceServiceFacade, // if the strategy use persistence facade separately + ) } diff --git a/pkg/bbgo/trader_test.go b/pkg/bbgo/trader_test.go new file mode 100644 index 0000000000..f30d11b655 --- /dev/null +++ b/pkg/bbgo/trader_test.go @@ -0,0 +1 @@ +package bbgo diff --git a/pkg/bbgo/twap_order_executor.go b/pkg/bbgo/twap_order_executor.go new file mode 100644 index 0000000000..1a942cd88d --- /dev/null +++ b/pkg/bbgo/twap_order_executor.go @@ -0,0 +1,467 @@ +package bbgo + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type TwapExecution struct { + Session *ExchangeSession + Symbol string + Side types.SideType + TargetQuantity fixedpoint.Value + SliceQuantity fixedpoint.Value + StopPrice fixedpoint.Value + NumOfTicks int + UpdateInterval time.Duration + DeadlineTime time.Time + + market types.Market + marketDataStream types.Stream + + userDataStream types.Stream + userDataStreamCtx context.Context + cancelUserDataStream context.CancelFunc + + orderBook *types.StreamOrderBook + currentPrice fixedpoint.Value + activePosition fixedpoint.Value + + activeMakerOrders *ActiveOrderBook + orderStore *OrderStore + position *types.Position + + executionCtx context.Context + cancelExecution context.CancelFunc + + stoppedC chan struct{} + + state int + + mu sync.Mutex +} + 
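// Illustrative usage sketch (not part of this change set): how the TwapExecution
// type above could be driven from caller code inside package bbgo. It assumes an
// already-connected *ExchangeSession named `session` and a parent context `ctx`;
// the symbol, quantities and NumOfTicks values are placeholders only.
//
//	execution := &TwapExecution{
//		Session:        session,
//		Symbol:         "BTCUSDT",
//		Side:           types.SideTypeBuy,
//		TargetQuantity: fixedpoint.NewFromFloat(1.0),
//		SliceQuantity:  fixedpoint.NewFromFloat(0.05),
//		NumOfTicks:     1,
//		UpdateInterval: 10 * time.Second,
//	}
//
//	if err := execution.Run(ctx); err != nil {
//		return err
//	}
//
//	// wait until the target quantity is filled or the caller cancels
//	select {
//	case <-execution.Done():
//	case <-ctx.Done():
//	}
//
//	shutdownCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
//	defer cancel()
//	execution.Shutdown(shutdownCtx)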
+func (e *TwapExecution) connectMarketData(ctx context.Context) {
+	log.Infof("connecting market data stream...")
+	if err := e.marketDataStream.Connect(ctx); err != nil {
+		log.WithError(err).Errorf("market data stream connect error")
+	}
+}
+
+func (e *TwapExecution) connectUserData(ctx context.Context) {
+	log.Infof("connecting user data stream...")
+	if err := e.userDataStream.Connect(ctx); err != nil {
+		log.WithError(err).Errorf("user data stream connect error")
+	}
+}
+
+func (e *TwapExecution) newBestPriceOrder() (orderForm types.SubmitOrder, err error) {
+	book := e.orderBook.Copy()
+	sideBook := book.SideBook(e.Side)
+
+	first, ok := sideBook.First()
+	if !ok {
+		return orderForm, fmt.Errorf("empty %s %s side book", e.Symbol, e.Side)
+	}
+
+	newPrice := first.Price
+	spread, ok := book.Spread()
+	if !ok {
+		return orderForm, errors.New("can not calculate spread, neither bid price nor ask price exists")
+	}
+
+	// for example, we have tickSize = 0.01, and spread is 28.02 - 28.00 = 0.02
+	// assign tickSpread = min(spread - tickSize, tickSpread)
+	//
+	// if number of ticks = 0, then the tickSpread is 0
+	//   tickSpread = min(0.02 - 0.01, 0)
+	//   price = first bid price 28.00 + tickSpread (0.00) = 28.00
+	//
+	// if number of ticks = 1, then the tickSpread is 0.01
+	//   tickSpread = min(0.02 - 0.01, 0.01)
+	//   price = first bid price 28.00 + tickSpread (0.01) = 28.01
+	//
+	// if number of ticks = 2, then the tickSpread is 0.02, but it is capped at spread - tickSize = 0.01
+	//   tickSpread = min(0.02 - 0.01, 0.02)
+	//   price = first bid price 28.00 + tickSpread (0.01) = 28.01
+	tickSize := e.market.TickSize
+	tickSpread := tickSize.Mul(fixedpoint.NewFromInt(int64(e.NumOfTicks)))
+	if spread.Compare(tickSize) > 0 {
+		// there is a gap in the spread
+		tickSpread = fixedpoint.Min(tickSpread, spread.Sub(tickSize))
+		switch e.Side {
+		case types.SideTypeSell:
+			newPrice = newPrice.Sub(tickSpread)
+		case types.SideTypeBuy:
+			newPrice = newPrice.Add(tickSpread)
+		}
+	}
+
+	if e.StopPrice.Sign() > 0 {
+		switch e.Side {
+		case types.SideTypeSell:
+			if newPrice.Compare(e.StopPrice) < 0 {
+				log.Infof("%s order price %s is lower than the stop sell price %s, setting order price to the stop sell price %s",
+					e.Symbol,
+					newPrice.String(),
+					e.StopPrice.String(),
+					e.StopPrice.String())
+				newPrice = e.StopPrice
+			}
+
+		case types.SideTypeBuy:
+			if newPrice.Compare(e.StopPrice) > 0 {
+				log.Infof("%s order price %s is higher than the stop buy price %s, setting order price to the stop buy price %s",
+					e.Symbol,
+					newPrice.String(),
+					e.StopPrice.String(),
+					e.StopPrice.String())
+				newPrice = e.StopPrice
+			}
+		}
+	}
+
+	minQuantity := e.market.MinQuantity
+	base := e.position.GetBase()
+
+	restQuantity := e.TargetQuantity.Sub(base.Abs())
+
+	if restQuantity.Sign() <= 0 {
+		if e.cancelContextIfTargetQuantityFilled() {
+			return
+		}
+	}
+
+	if restQuantity.Compare(minQuantity) < 0 {
+		return orderForm, fmt.Errorf("can not continue placing orders, rest quantity %s is less than the min quantity %s", restQuantity.String(), minQuantity.String())
+	}
+
+	// when slice = 1000, if we only have 998, we should adjust our quantity to 998
+	orderQuantity := fixedpoint.Min(e.SliceQuantity, restQuantity)
+
+	// if the rest quantity in the next round is not enough, we should merge the rest quantity into this round
+	// if there are rest slices
+	nextRestQuantity := restQuantity.Sub(e.SliceQuantity)
+	if nextRestQuantity.Sign() > 0 && nextRestQuantity.Compare(minQuantity) < 0 {
+		orderQuantity = restQuantity
+	}
+
+	minNotional := e.market.MinNotional
+	orderQuantity = AdjustQuantityByMinAmount(orderQuantity, newPrice, minNotional)
+
+	switch e.Side {
+	case types.SideTypeSell:
+		// check the base balance for a sell order, try to sell as much as possible
+		if b, ok := e.Session.GetAccount().Balance(e.market.BaseCurrency); ok {
+			orderQuantity = fixedpoint.Min(b.Available, orderQuantity)
+		}
+
+	case types.SideTypeBuy:
+		// check the quote balance for a buy order, cap the quantity by the quote amount we can spend
+		if b, ok := e.Session.GetAccount().Balance(e.market.QuoteCurrency); ok {
+			orderQuantity = AdjustQuantityByMaxAmount(orderQuantity, newPrice, b.Available)
+		}
+	}
+
+	if e.DeadlineTime != emptyTime {
+		now := time.Now()
+		if now.After(e.DeadlineTime) {
+			orderForm = types.SubmitOrder{
+				Symbol:   e.Symbol,
+				Side:     e.Side,
+				Type:     types.OrderTypeMarket,
+				Quantity: restQuantity,
+				Market:   e.market,
+			}
+			return orderForm, nil
+		}
+	}
+
+	orderForm = types.SubmitOrder{
+		// ClientOrderID: "",
+		Symbol:      e.Symbol,
+		Side:        e.Side,
+		Type:        types.OrderTypeLimitMaker,
+		Quantity:    orderQuantity,
+		Price:       newPrice,
+		Market:      e.market,
+		TimeInForce: "GTC",
+	}
+	return orderForm, err
+}
+
+func (e *TwapExecution) updateOrder(ctx context.Context) error {
+	book := e.orderBook.Copy()
+	sideBook := book.SideBook(e.Side)
+
+	first, ok := sideBook.First()
+	if !ok {
+		return fmt.Errorf("empty %s %s side book", e.Symbol, e.Side)
+	}
+
+	// if there is no gap between the first price entry and the second price entry
+	second, ok := sideBook.Second()
+	if !ok {
+		return fmt.Errorf("no second price on the %s order book %s, can not update", e.Symbol, e.Side)
+	}
+
+	tickSize := e.market.TickSize
+	numOfTicks := fixedpoint.NewFromInt(int64(e.NumOfTicks))
+	tickSpread := tickSize.Mul(numOfTicks)
+
+	// check and see if we need to cancel the existing active orders
+	for e.activeMakerOrders.NumOfOrders() > 0 {
+		orders := e.activeMakerOrders.Orders()
+
+		if len(orders) > 1 {
+			log.Warnf("more than 1 %s open orders in the strategy...", e.Symbol)
+		}
+
+		// get the first order
+		order := orders[0]
+		orderPrice := order.Price
+		// quantity := fixedpoint.NewFromFloat(order.Quantity)
+
+		remainingQuantity := order.Quantity.Sub(order.ExecutedQuantity)
+		if remainingQuantity.Compare(e.market.MinQuantity) <= 0 {
+			log.Infof("order remaining quantity %s is less than the market minimal quantity %s, skip updating order", remainingQuantity.String(), e.market.MinQuantity.String())
+			return nil
+		}
+
+		// if the first bid price or first ask price is the same as the current active order
+		// we should skip updating the order
+		// DO NOT UPDATE IF:
+		//   tickSpread > 0 AND current order price == second price + tickSpread
+		//   current order price == first price
+		log.Infof("orderPrice = %s first.Price = %s second.Price = %s tickSpread = %s", orderPrice.String(), first.Price.String(), second.Price.String(), tickSpread.String())
+
+		switch e.Side {
+		case types.SideTypeBuy:
+			if tickSpread.Sign() > 0 && orderPrice == second.Price.Add(tickSpread) {
+				log.Infof("the current order is already on the best ask price %s", orderPrice.String())
+				return nil
+			} else if orderPrice == first.Price {
+				log.Infof("the current order is already on the best bid price %s", orderPrice.String())
+				return nil
+			}
+
+		case types.SideTypeSell:
+			if tickSpread.Sign() > 0 && orderPrice == second.Price.Sub(tickSpread) {
+				log.Infof("the current order is already on the best ask price %s", orderPrice.String())
+				return nil
+			} else if orderPrice == first.Price {
+				log.Infof("the current order is already on the best ask price %s", orderPrice.String())
+
return nil + } + } + + e.cancelActiveOrders() + } + + orderForm, err := e.newBestPriceOrder() + if err != nil { + return err + } + + createdOrders, err := e.Session.OrderExecutor.SubmitOrders(ctx, orderForm) + if err != nil { + return err + } + + e.activeMakerOrders.Add(createdOrders...) + e.orderStore.Add(createdOrders...) + return nil +} + +func (e *TwapExecution) cancelActiveOrders() { + gracefulCtx, gracefulCancel := context.WithTimeout(context.TODO(), 30*time.Second) + defer gracefulCancel() + e.activeMakerOrders.GracefulCancel(gracefulCtx, e.Session.Exchange) +} + +func (e *TwapExecution) orderUpdater(ctx context.Context) { + updateLimiter := rate.NewLimiter(rate.Every(3*time.Second), 1) + ticker := time.NewTimer(e.UpdateInterval) + defer ticker.Stop() + + // we should stop updater and clean up our open orders, if + // 1. the given context is canceled. + // 2. the base quantity equals to or greater than the target quantity + defer func() { + e.cancelActiveOrders() + e.cancelUserDataStream() + e.emitDone() + }() + + for { + select { + case <-ctx.Done(): + return + + case <-e.orderBook.C: + if !updateLimiter.Allow() { + break + } + + if e.cancelContextIfTargetQuantityFilled() { + return + } + + log.Infof("%s order book changed, checking order...", e.Symbol) + if err := e.updateOrder(ctx); err != nil { + log.WithError(err).Errorf("order update failed") + } + + case <-ticker.C: + if !updateLimiter.Allow() { + break + } + + if e.cancelContextIfTargetQuantityFilled() { + return + } + + if err := e.updateOrder(ctx); err != nil { + log.WithError(err).Errorf("order update failed") + } + + } + } +} + +func (e *TwapExecution) cancelContextIfTargetQuantityFilled() bool { + base := e.position.GetBase() + + if base.Abs().Compare(e.TargetQuantity) >= 0 { + log.Infof("filled target quantity, canceling the order execution context") + e.cancelExecution() + return true + } + return false +} + +func (e *TwapExecution) handleTradeUpdate(trade types.Trade) { + // ignore trades that are not in the symbol we interested + if trade.Symbol != e.Symbol { + return + } + + if !e.orderStore.Exists(trade.OrderID) { + return + } + + log.Info(trade.String()) + + e.position.AddTrade(trade) + log.Infof("position updated: %+v", e.position) +} + +func (e *TwapExecution) handleFilledOrder(order types.Order) { + log.Info(order.String()) + + // filled event triggers the order removal from the active order store + // we need to ensure we received every order update event before the execution is done. 
+ e.cancelContextIfTargetQuantityFilled() +} + +func (e *TwapExecution) Run(parentCtx context.Context) error { + e.mu.Lock() + e.stoppedC = make(chan struct{}) + e.executionCtx, e.cancelExecution = context.WithCancel(parentCtx) + e.userDataStreamCtx, e.cancelUserDataStream = context.WithCancel(context.Background()) + e.mu.Unlock() + + if e.UpdateInterval == 0 { + e.UpdateInterval = 10 * time.Second + } + + var ok bool + e.market, ok = e.Session.Market(e.Symbol) + if !ok { + return fmt.Errorf("market %s not found", e.Symbol) + } + + e.marketDataStream = e.Session.Exchange.NewStream() + e.marketDataStream.SetPublicOnly() + e.marketDataStream.Subscribe(types.BookChannel, e.Symbol, types.SubscribeOptions{}) + + e.orderBook = types.NewStreamBook(e.Symbol) + e.orderBook.BindStream(e.marketDataStream) + go e.connectMarketData(e.executionCtx) + + e.userDataStream = e.Session.Exchange.NewStream() + e.userDataStream.OnTradeUpdate(e.handleTradeUpdate) + e.position = &types.Position{ + Symbol: e.Symbol, + BaseCurrency: e.market.BaseCurrency, + QuoteCurrency: e.market.QuoteCurrency, + } + + e.orderStore = NewOrderStore(e.Symbol) + e.orderStore.BindStream(e.userDataStream) + e.activeMakerOrders = NewActiveOrderBook(e.Symbol) + e.activeMakerOrders.OnFilled(e.handleFilledOrder) + e.activeMakerOrders.BindStream(e.userDataStream) + + go e.connectUserData(e.userDataStreamCtx) + go e.orderUpdater(e.executionCtx) + return nil +} + +func (e *TwapExecution) emitDone() { + e.mu.Lock() + if e.stoppedC == nil { + e.stoppedC = make(chan struct{}) + } + close(e.stoppedC) + e.mu.Unlock() +} + +func (e *TwapExecution) Done() (c <-chan struct{}) { + e.mu.Lock() + // if the channel is not allocated, it means it's not started yet, we need to return a closed channel + if e.stoppedC == nil { + e.stoppedC = make(chan struct{}) + close(e.stoppedC) + c = e.stoppedC + } else { + c = e.stoppedC + } + + e.mu.Unlock() + return c +} + +// Shutdown stops the execution +// If we call this method, it means the execution is still running, +// We need to: +// 1. stop the order updater (by using the execution context) +// 2. the order updater cancels all open orders and close the user data stream +func (e *TwapExecution) Shutdown(shutdownCtx context.Context) { + e.mu.Lock() + if e.cancelExecution != nil { + e.cancelExecution() + } + e.mu.Unlock() + + for { + select { + + case <-shutdownCtx.Done(): + return + + case <-e.Done(): + return + + } + } +} diff --git a/pkg/bbgo/cache.go b/pkg/cache/cache.go similarity index 54% rename from pkg/bbgo/cache.go rename to pkg/cache/cache.go index f700d012ca..5a5f0ccc26 100644 --- a/pkg/bbgo/cache.go +++ b/pkg/cache/cache.go @@ -1,17 +1,25 @@ -package bbgo +package cache import ( + "context" "encoding/json" + "fmt" "io/ioutil" "os" "path" "reflect" + "time" "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/types" ) type DataFetcher func() (interface{}, error) +const cacheExpiry = 24 * time.Hour + // WithCache let you use the cache with the given cache key, variable reference and your data fetcher, // The key must be an unique ID. 
// obj is the pointer of your local variable @@ -20,7 +28,10 @@ func WithCache(key string, obj interface{}, fetcher DataFetcher) error { cacheDir := CacheDir() cacheFile := path.Join(cacheDir, key+".json") - if _, err := os.Stat(cacheFile); os.IsNotExist(err) { + stat, err := os.Stat(cacheFile) + if os.IsNotExist(err) || (stat != nil && time.Since(stat.ModTime()) > cacheExpiry) { + log.Debugf("cache %s not found or cache expired, executing fetcher callback to get the data", cacheFile) + data, err := fetcher() if err != nil { return err @@ -43,6 +54,7 @@ func WithCache(key string, obj interface{}, fetcher DataFetcher) error { rv.Set(reflect.ValueOf(data)) } else { + log.Debugf("cache %s found", cacheFile) data, err := ioutil.ReadFile(cacheFile) if err != nil { @@ -56,3 +68,18 @@ func WithCache(key string, obj interface{}, fetcher DataFetcher) error { return nil } + +func LoadExchangeMarketsWithCache(ctx context.Context, ex types.Exchange) (markets types.MarketMap, err error) { + key := fmt.Sprintf("%s-markets", ex.Name()) + if futureExchange, implemented := ex.(types.FuturesExchange); implemented { + settings := futureExchange.GetFuturesSettings() + if settings.IsFutures { + key = fmt.Sprintf("%s-futures-markets", ex.Name()) + } + } + + err = WithCache(key, &markets, func() (interface{}, error) { + return ex.QueryMarkets(ctx) + }) + return markets, err +} diff --git a/pkg/bbgo/home.go b/pkg/cache/home.go similarity index 80% rename from pkg/bbgo/home.go rename to pkg/cache/home.go index 8f7268f3d8..c6480543e6 100644 --- a/pkg/bbgo/home.go +++ b/pkg/cache/home.go @@ -1,4 +1,4 @@ -package bbgo +package cache import ( "os" @@ -20,11 +20,6 @@ func CacheDir() string { return prepareDir(dir) } -func SourceDir() string { - home := HomeDir() - return path.Join(home, "source") -} - func HomeDir() string { homeDir, err := os.UserHomeDir() if err != nil { diff --git a/pkg/cmd/account.go b/pkg/cmd/account.go new file mode 100644 index 0000000000..41ed9465d3 --- /dev/null +++ b/pkg/cmd/account.go @@ -0,0 +1,97 @@ +package cmd + +import ( + "context" + "fmt" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/types" + + "github.com/c9s/bbgo/pkg/bbgo" +) + +func init() { + accountCmd.Flags().String("session", "", "the exchange session name for querying information") + accountCmd.Flags().Bool("total", false, "report total asset") + RootCmd.AddCommand(accountCmd) +} + +// go run ./cmd/bbgo account --session=ftx --config=config/bbgo.yaml +var accountCmd = &cobra.Command{ + Use: "account [--session SESSION]", + Short: "show user account details (ex: balance)", + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + showTotal, err := cmd.Flags().GetBool("total") + if err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureDatabase(ctx); err != nil { + return err + } + + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + if len(sessionName) > 0 { + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + a, err := session.Exchange.QueryAccount(ctx) + if err != nil { + return errors.Wrapf(err, "account query failed") + } + + a.Print() + } else { + var total = types.BalanceMap{} + for _, session := range environ.Sessions() { + a, err := 
session.Exchange.QueryAccount(ctx) + if err != nil { + return errors.Wrapf(err, "account query failed") + } + + log.Infof("--------------------------------------------") + log.Infof("SESSION %s", session.Name) + log.Infof("--------------------------------------------") + a.Print() + + for c, b := range a.Balances() { + tb, ok := total[c] + if !ok { + total[c] = b + } else { + tb.Available = tb.Available.Add(b.Available) + tb.Locked = tb.Locked.Add(b.Locked) + total[c] = tb + } + } + + if showTotal { + log.Infof("===============================================") + log.Infof("TOTAL ASSETS") + log.Infof("===============================================") + total.Print() + } + } + + } + + return nil + }, +} diff --git a/pkg/cmd/backtest.go b/pkg/cmd/backtest.go index 7411441089..363d2d2824 100644 --- a/pkg/cmd/backtest.go +++ b/pkg/cmd/backtest.go @@ -1,10 +1,18 @@ package cmd import ( + "bufio" "context" "fmt" + "os" + "path/filepath" + "sort" + "strings" + "syscall" "time" + "github.com/fatih/color" + "github.com/google/uuid" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" @@ -14,24 +22,34 @@ import ( "github.com/c9s/bbgo/pkg/backtest" "github.com/c9s/bbgo/pkg/bbgo" "github.com/c9s/bbgo/pkg/cmd/cmdutil" + "github.com/c9s/bbgo/pkg/data/tsv" + "github.com/c9s/bbgo/pkg/exchange" "github.com/c9s/bbgo/pkg/service" "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" ) func init() { - BacktestCmd.Flags().String("exchange", "", "target exchange") BacktestCmd.Flags().Bool("sync", false, "sync backtest data") BacktestCmd.Flags().Bool("sync-only", false, "sync backtest data only, do not run backtest") - BacktestCmd.Flags().String("sync-from", time.Now().AddDate(0, -6, 0).Format(types.DateFormat), "sync backtest data from the given time") + BacktestCmd.Flags().String("sync-from", "", "sync backtest data from the given time, which will override the time range in the backtest config") + BacktestCmd.Flags().String("sync-exchange", "", "specify only one exchange to sync backtest data") + BacktestCmd.Flags().String("session", "", "specify only one exchange session to run backtest") + + BacktestCmd.Flags().Bool("verify", false, "verify the kline back-test data") + BacktestCmd.Flags().Bool("base-asset-baseline", false, "use base asset performance as the competitive baseline performance") BacktestCmd.Flags().CountP("verbose", "v", "verbose level") BacktestCmd.Flags().String("config", "config/bbgo.yaml", "strategy config file") + BacktestCmd.Flags().Bool("force", false, "force execution without confirm") + BacktestCmd.Flags().String("output", "", "the report output directory") + BacktestCmd.Flags().Bool("subdir", false, "generate report in the sub-directory of the output directory") RootCmd.AddCommand(BacktestCmd) } var BacktestCmd = &cobra.Command{ Use: "backtest", - Short: "backtest your strategies", + Short: "run backtest with strategies", SilenceUsage: true, RunE: func(cmd *cobra.Command, args []string) error { verboseCnt, err := cmd.Flags().GetCount("verbose") @@ -39,6 +57,10 @@ var BacktestCmd = &cobra.Command{ return err } + if viper.GetBool("debug") { + verboseCnt = 2 + } + configFile, err := cmd.Flags().GetString("config") if err != nil { return err @@ -58,45 +80,49 @@ var BacktestCmd = &cobra.Command{ return err } - syncOnly, err := cmd.Flags().GetBool("sync-only") + syncExchangeName, err := cmd.Flags().GetString("sync-exchange") if err != nil { return err } - syncFromDateStr, err := cmd.Flags().GetString("sync-from") + sessionName, err := 
cmd.Flags().GetString("session") if err != nil { return err } - syncFromTime, err := time.Parse(types.DateFormat, syncFromDateStr) + force, err := cmd.Flags().GetBool("force") if err != nil { return err } - exchangeNameStr, err := cmd.Flags().GetString("exchange") + outputDirectory, err := cmd.Flags().GetString("output") if err != nil { return err } - exchangeName, err := types.ValidExchangeName(exchangeNameStr) + generatingReport := len(outputDirectory) > 0 + + reportFileInSubDir, err := cmd.Flags().GetBool("subdir") if err != nil { return err } - sourceExchange, err := cmdutil.NewExchange(exchangeName) + syncOnly, err := cmd.Flags().GetBool("sync-only") if err != nil { return err } - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + syncFromDateStr, err := cmd.Flags().GetString("sync-from") + if err != nil { + return err + } - userConfig, err := bbgo.Load(configFile, true) + shouldVerify, err := cmd.Flags().GetBool("verify") if err != nil { return err } - db, err := bbgo.ConnectMySQL(viper.GetString("mysql-url")) + userConfig, err := bbgo.Load(configFile, true) if err != nil { return err } @@ -105,75 +131,92 @@ var BacktestCmd = &cobra.Command{ return errors.New("backtest config is not defined") } + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + var now = time.Now() + var startTime, endTime time.Time + + startTime = userConfig.Backtest.StartTime.Time() + // set default start time to the past 6 months - if len(userConfig.Backtest.StartTime) == 0 { - userConfig.Backtest.StartTime = time.Now().AddDate(0, -6, 0).Format("2006-01-02") + // userConfig.Backtest.StartTime = now.AddDate(0, -6, 0).Format("2006-01-02") + if userConfig.Backtest.EndTime != nil { + endTime = userConfig.Backtest.EndTime.Time() + } else { + endTime = now } - startTime, err := userConfig.Backtest.ParseStartTime() - if err != nil { - return err - } + log.Infof("starting backtest with startTime %s", startTime.Format(time.ANSIC)) environ := bbgo.NewEnvironment() - if err := environ.ConfigureDatabase(ctx); err != nil { + if err := BootstrapBacktestEnvironment(ctx, environ); err != nil { return err } - backtestService := &service.BacktestService{DB: db} + if environ.DatabaseService == nil { + return errors.New("database service is not enabled, please check your environment variables DB_DRIVER and DB_DSN") + } - if wantSync { - log.Info("starting synchronization...") - for _, symbol := range userConfig.Backtest.Symbols { - if err := backtestService.Sync(ctx, sourceExchange, symbol, syncFromTime); err != nil { - return err - } + backtestService := &service.BacktestService{DB: environ.DatabaseService.DB} + environ.BacktestService = backtestService + bbgo.SetBackTesting(backtestService) + + if len(sessionName) > 0 { + userConfig.Backtest.Sessions = []string{sessionName} + } else if len(syncExchangeName) > 0 { + userConfig.Backtest.Sessions = []string{syncExchangeName} + } else if len(userConfig.Backtest.Sessions) == 0 { + log.Infof("backtest.sessions is not defined, using all supported exchanges: %v", types.SupportedExchanges) + for _, exName := range types.SupportedExchanges { + userConfig.Backtest.Sessions = append(userConfig.Backtest.Sessions, exName.String()) } - log.Info("synchronization done") - - var corruptCnt = 0 - for _, symbol := range userConfig.Backtest.Symbols { - log.Infof("verifying backtesting data...") + } - for interval := range types.SupportedIntervals { - log.Infof("verifying %s %s kline data...", symbol, interval) + var sourceExchanges = 
make(map[types.ExchangeName]types.Exchange) + for _, name := range userConfig.Backtest.Sessions { + exName, err := types.ValidExchangeName(name) + if err != nil { + return err + } - klineC, errC := backtestService.QueryKLinesCh(startTime, time.Now(), sourceExchange, []string{symbol}, []types.Interval{interval}) - var emptyKLine types.KLine - var prevKLine types.KLine - for k := range klineC { - if verboseCnt > 1 { - fmt.Print(".") - } + publicExchange, err := exchange.NewPublic(exName) + if err != nil { + return err + } + sourceExchanges[exName] = publicExchange + } - if prevKLine != emptyKLine { - if prevKLine.StartTime.Add(interval.Duration()) != k.StartTime { - corruptCnt++ - log.Errorf("found kline data corrupted at time: %s kline: %+v", k.StartTime, k) - log.Errorf("between %d and %d", - prevKLine.StartTime.Unix(), - k.StartTime.Unix()) - } - } + var syncFromTime time.Time - prevKLine = k - } + // user can override the sync from time if the option is given + if len(syncFromDateStr) > 0 { + syncFromTime, err = time.Parse(types.DateFormat, syncFromDateStr) + if err != nil { + return err + } - if verboseCnt > 1 { - fmt.Println() - } + if syncFromTime.After(startTime) { + return fmt.Errorf("sync-from time %s can not be latter than the backtest start time %s", syncFromTime, startTime) + } + } else { + // we need at least 1 month backward data for EMA and last prices + syncFromTime = startTime.AddDate(0, -1, 0) + log.Infof("adjusted sync start time %s to %s for backward market data", startTime, syncFromTime) + } - if err := <-errC; err != nil { - return err - } - } + if wantSync { + log.Infof("starting synchronization: %v", userConfig.Backtest.Symbols) + if err := sync(ctx, userConfig, backtestService, sourceExchanges, syncFromTime.Local(), endTime.Local()); err != nil { + return err } + log.Info("synchronization done") - log.Infof("backtest verification completed") - if corruptCnt > 0 { - log.Errorf("found %d corruptions", corruptCnt) - } else { - log.Infof("found %d corruptions", corruptCnt) + if shouldVerify { + err := verify(userConfig, backtestService, sourceExchanges, syncFromTime.Local(), endTime.Local()) + if err != nil { + return err + } } if syncOnly { @@ -181,19 +224,21 @@ var BacktestCmd = &cobra.Command{ } } - backtestExchange := backtest.NewExchange(exchangeName, backtestService, userConfig.Backtest) - - environ.SetStartTime(startTime) - environ.AddExchange(exchangeName.String(), backtestExchange) + if userConfig.Backtest.RecordTrades { + log.Warn("!!! Trade recording is enabled for back-testing !!!") + log.Warn("!!! To run back-testing, you should use an isolated database for storing back-testing trades !!!") + log.Warn("!!! 
The trade record in the current database WILL ALL BE DELETED BEFORE THIS BACK-TESTING !!!") + if !force { + if !confirmation("Are you sure to continue?") { + return nil + } + } - environ.Notifiability = bbgo.Notifiability{ - SymbolChannelRouter: bbgo.NewPatternChannelRouter(nil), - SessionChannelRouter: bbgo.NewPatternChannelRouter(nil), - ObjectChannelRouter: bbgo.NewObjectChannelRouter(), + if err := environ.TradeService.DeleteAll(); err != nil { + return err + } } - trader := bbgo.NewTrader(environ) - if verboseCnt == 2 { log.SetLevel(log.DebugLevel) } else if verboseCnt > 0 { @@ -201,90 +246,463 @@ var BacktestCmd = &cobra.Command{ } else { // default mode, disable strategy logging and order executor logging log.SetLevel(log.ErrorLevel) - trader.DisableLogging() } - if userConfig.RiskControls != nil { - log.Infof("setting risk controls: %+v", userConfig.RiskControls) - trader.SetRiskControls(userConfig.RiskControls) + environ.SetStartTime(startTime) + + // exchangeNameStr is the session name. + for name, sourceExchange := range sourceExchanges { + backtestExchange, err := backtest.NewExchange(sourceExchange.Name(), sourceExchange, backtestService, userConfig.Backtest) + if err != nil { + return errors.Wrap(err, "failed to create backtest exchange") + } + session := environ.AddExchange(name.String(), backtestExchange) + exchangeFromConfig := userConfig.Sessions[name.String()] + if exchangeFromConfig != nil { + session.UseHeikinAshi = exchangeFromConfig.UseHeikinAshi + } } - for _, entry := range userConfig.ExchangeStrategies { - log.Infof("attaching strategy %T on %s instead of %v", entry.Strategy, exchangeName.String(), entry.Mounts) - trader.AttachStrategyOn(exchangeName.String(), entry.Strategy) + if err := environ.Init(ctx); err != nil { + return err } - if len(userConfig.CrossExchangeStrategies) > 0 { - log.Warnf("backtest does not support CrossExchangeStrategy, strategies won't be added.") + trader := bbgo.NewTrader(environ) + if verboseCnt == 0 { + trader.DisableLogging() + } + + if err := trader.Configure(userConfig); err != nil { + return err } if err := trader.Run(ctx); err != nil { return err } - <-backtestExchange.Done() + backTestIntervals := []types.Interval{types.Interval1h, types.Interval1d} + exchangeSources, err := toExchangeSources(environ.Sessions(), backTestIntervals...) 
+ if err != nil { + return err + } + + var kLineHandlers []func(k types.KLine, exSource *backtest.ExchangeDataSource) + var manifests backtest.Manifests + var runID = userConfig.GetSignature() + "_" + uuid.NewString() + var reportDir = outputDirectory + + if generatingReport { + if reportFileInSubDir { + // reportDir = filepath.Join(reportDir, backtestSessionName) + reportDir = filepath.Join(reportDir, runID) + } + if err := util.SafeMkdirAll(reportDir); err != nil { + return err + } + + startTimeStr := startTime.Format("20060102") + endTimeStr := endTime.Format("20060102") + kLineSubDir := strings.Join([]string{"klines", "_", startTimeStr, "-", endTimeStr}, "") + kLineDataDir := filepath.Join(outputDirectory, "shared", kLineSubDir) + if err := util.SafeMkdirAll(kLineDataDir); err != nil { + return err + } + + stateRecorder := backtest.NewStateRecorder(reportDir) + err = trader.IterateStrategies(func(st bbgo.StrategyID) error { + return stateRecorder.Scan(st.(backtest.Instance)) + }) + if err != nil { + return err + } + + manifests = stateRecorder.Manifests() + manifests, err = rewriteManifestPaths(manifests, reportDir) + if err != nil { + return err + } + + // state snapshot + kLineHandlers = append(kLineHandlers, func(k types.KLine, _ *backtest.ExchangeDataSource) { + // snapshot per 1m + if k.Interval == types.Interval1m && k.Closed { + if _, err := stateRecorder.Snapshot(); err != nil { + log.WithError(err).Errorf("state record failed to snapshot the strategy state") + } + } + }) + + dumper := backtest.NewKLineDumper(kLineDataDir) + defer func() { + _ = dumper.Close() + }() + defer func() { + if err := dumper.Close(); err != nil { + log.WithError(err).Errorf("kline dumper can not close files") + } + }() + + kLineHandlers = append(kLineHandlers, func(k types.KLine, _ *backtest.ExchangeDataSource) { + if err := dumper.Record(k); err != nil { + log.WithError(err).Errorf("can not write kline to file") + } + }) + + // equity curve recording -- record per 1h kline + equityCurveTsv, err := tsv.NewWriterFile(filepath.Join(reportDir, "equity_curve.tsv")) + if err != nil { + return err + } + defer func() { _ = equityCurveTsv.Close() }() + + _ = equityCurveTsv.Write([]string{ + "time", + "in_usd", + }) + defer equityCurveTsv.Flush() + + kLineHandlers = append(kLineHandlers, func(k types.KLine, exSource *backtest.ExchangeDataSource) { + if k.Interval != types.Interval1h { + return + } + + balances, err := exSource.Exchange.QueryAccountBalances(ctx) + if err != nil { + log.WithError(err).Errorf("query back-test account balance error") + } else { + assets := balances.Assets(exSource.Session.AllLastPrices(), k.EndTime.Time()) + _ = equityCurveTsv.Write([]string{ + k.EndTime.Time().Format(time.RFC1123), + assets.InUSD().String(), + }) + } + }) + + ordersTsv, err := tsv.NewWriterFile(filepath.Join(reportDir, "orders.tsv")) + if err != nil { + return err + } + defer func() { _ = ordersTsv.Close() }() + _ = ordersTsv.Write(types.Order{}.CsvHeader()) + + for _, exSource := range exchangeSources { + exSource.Session.UserDataStream.OnOrderUpdate(func(order types.Order) { + if order.Status == types.OrderStatusFilled { + for _, record := range order.CsvRecords() { + _ = ordersTsv.Write(record) + } + } + }) + } + } + + runCtx, cancelRun := context.WithCancel(ctx) + go func() { + defer cancelRun() + + // Optimize back-test speed for single exchange source + var numOfExchangeSources = len(exchangeSources) + if numOfExchangeSources == 1 { + exSource := exchangeSources[0] + for k := range exSource.C { + 
exSource.Exchange.ConsumeKLine(k) + + for _, h := range kLineHandlers { + h(k, &exSource) + } + + } + + if err := exSource.Exchange.CloseMarketData(); err != nil { + log.WithError(err).Errorf("close market data error") + } + return + } + + RunMultiExchangeData: + for { + for _, exK := range exchangeSources { + k, more := <-exK.C + if !more { + if err := exK.Exchange.CloseMarketData(); err != nil { + log.WithError(err).Errorf("close market data error") + return + } + break RunMultiExchangeData + } + + exK.Exchange.ConsumeKLine(k) + + for _, h := range kLineHandlers { + h(k, &exK) + } + } + } + }() + + cmdutil.WaitForSignal(runCtx, syscall.SIGINT, syscall.SIGTERM) log.Infof("shutting down trader...") - shutdownCtx, cancel := context.WithDeadline(ctx, time.Now().Add(10*time.Second)) + shutdownCtx, cancelShutdown := context.WithDeadline(runCtx, time.Now().Add(10*time.Second)) trader.Graceful.Shutdown(shutdownCtx) - cancel() + cancelShutdown() // put the logger back to print the pnl log.SetLevel(log.InfoLevel) + + // aggregate total balances + initTotalBalances := types.BalanceMap{} + finalTotalBalances := types.BalanceMap{} + var sessionNames []string for _, session := range environ.Sessions() { + sessionNames = append(sessionNames, session.Name) + accountConfig := userConfig.Backtest.GetAccount(session.Name) + initBalances := accountConfig.Balances.BalanceMap() + initTotalBalances = initTotalBalances.Add(initBalances) - calculator := &pnl.AverageCostCalculator{ - TradingFeeCurrency: backtestExchange.PlatformFeeCurrency(), - } - for symbol, trades := range session.Trades { - market, ok := session.Market(symbol) - if !ok { - return fmt.Errorf("market not found: %s", symbol) - } + finalBalances := session.GetAccount().Balances() + finalTotalBalances = finalTotalBalances.Add(finalBalances) + } - startPrice, ok := session.StartPrice(symbol) - if !ok { - return fmt.Errorf("start price not found: %s", symbol) + summaryReport := &backtest.SummaryReport{ + StartTime: startTime, + EndTime: endTime, + Sessions: sessionNames, + InitialTotalBalances: initTotalBalances, + FinalTotalBalances: finalTotalBalances, + Manifests: manifests, + Symbols: nil, + } + + allKLineIntervals := map[types.Interval]struct{}{} + for _, interval := range backTestIntervals { + allKLineIntervals[interval] = struct{}{} + } + + for _, session := range environ.Sessions() { + for _, sub := range session.Subscriptions { + if sub.Channel == types.KLineChannel { + allKLineIntervals[sub.Options.Interval] = struct{}{} } + } + } + for interval := range allKLineIntervals { + summaryReport.Intervals = append(summaryReport.Intervals, interval) + } - log.Infof("%s PROFIT AND LOSS REPORT", symbol) - log.Infof("===============================================") + for _, session := range environ.Sessions() { - lastPrice, ok := session.LastPrice(symbol) - if !ok { - return fmt.Errorf("last price not found: %s", symbol) + for symbol, trades := range session.Trades { + symbolReport, err := createSymbolReport(userConfig, session, symbol, trades.Trades) + if err != nil { + return err } - report := calculator.Calculate(symbol, trades.Trades, lastPrice) - report.Print() + summaryReport.Symbols = append(summaryReport.Symbols, symbol) + summaryReport.SymbolReports = append(summaryReport.SymbolReports, *symbolReport) + summaryReport.TotalProfit = symbolReport.PnL.Profit + summaryReport.TotalUnrealizedProfit = symbolReport.PnL.UnrealizedProfit - initBalances := userConfig.Backtest.Account.Balances.BalanceMap() - finalBalances := session.Account.Balances() 
+ // write report to a file + if generatingReport { + reportFileName := fmt.Sprintf("symbol_report_%s.json", symbol) + if err := util.WriteJsonFile(filepath.Join(reportDir, reportFileName), &symbolReport); err != nil { + return err + } + } + } + } - log.Infof("INITIAL BALANCES:") - initBalances.Print() + if generatingReport { + summaryReportFile := filepath.Join(reportDir, "summary.json") - log.Infof("FINAL BALANCES:") - finalBalances.Print() + // output summary report filepath to stdout, so that our optimizer can read from it + fmt.Println(summaryReportFile) - if wantBaseAssetBaseline { - initBaseAsset := InBaseAsset(initBalances, market, startPrice) - finalBaseAsset := InBaseAsset(finalBalances, market, lastPrice) - log.Infof("INITIAL ASSET ~= %s %s (1 %s = %f)", market.FormatQuantity(initBaseAsset), market.BaseCurrency, market.BaseCurrency, startPrice) - log.Infof("FINAL ASSET ~= %s %s (1 %s = %f)", market.FormatQuantity(finalBaseAsset), market.BaseCurrency, market.BaseCurrency, lastPrice) + if err := util.WriteJsonFile(summaryReportFile, summaryReport); err != nil { + return err + } - log.Infof("%s BASE ASSET PERFORMANCE: %.2f%% (= (%.2f - %.2f) / %.2f)", market.BaseCurrency, (finalBaseAsset-initBaseAsset)/initBaseAsset*100.0, finalBaseAsset, initBaseAsset, initBaseAsset) - log.Infof("%s PERFORMANCE: %.2f%% (= (%.2f - %.2f) / %.2f)", market.BaseCurrency, (lastPrice-startPrice)/startPrice*100.0, lastPrice, startPrice, startPrice) + // append report index + if reportFileInSubDir { + if err := backtest.AddReportIndexRun(outputDirectory, backtest.Run{ + ID: runID, + Config: userConfig, + Time: time.Now(), + }); err != nil { + return err } } + } else { + color.Green("BACK-TEST REPORT") + color.Green("===============================================\n") + color.Green("START TIME: %s\n", startTime.Format(time.RFC1123)) + color.Green("END TIME: %s\n", endTime.Format(time.RFC1123)) + color.Green("INITIAL TOTAL BALANCE: %v\n", initTotalBalances) + color.Green("FINAL TOTAL BALANCE: %v\n", finalTotalBalances) + + for _, symbolReport := range summaryReport.SymbolReports { + symbolReport.Print(wantBaseAssetBaseline) + } } return nil }, } -func InBaseAsset(balances types.BalanceMap, market types.Market, price float64) float64 { - quote := balances[market.QuoteCurrency] - base := balances[market.BaseCurrency] - return (base.Locked.Float64() + base.Available.Float64()) + ((quote.Locked.Float64() + quote.Available.Float64()) / price) +func createSymbolReport(userConfig *bbgo.Config, session *bbgo.ExchangeSession, symbol string, trades []types.Trade) (*backtest.SessionSymbolReport, error) { + backtestExchange, ok := session.Exchange.(*backtest.Exchange) + if !ok { + return nil, fmt.Errorf("unexpected error, exchange instance is not a backtest exchange") + } + + market, ok := session.Market(symbol) + if !ok { + return nil, fmt.Errorf("market not found: %s, %s", symbol, session.Exchange.Name()) + } + + startPrice, ok := session.StartPrice(symbol) + if !ok { + return nil, fmt.Errorf("start price not found: %s, %s. 
run --sync first", symbol, session.Exchange.Name()) + } + + lastPrice, ok := session.LastPrice(symbol) + if !ok { + return nil, fmt.Errorf("last price not found: %s, %s", symbol, session.Exchange.Name()) + } + + calculator := &pnl.AverageCostCalculator{ + TradingFeeCurrency: backtestExchange.PlatformFeeCurrency(), + Market: market, + } + + report := calculator.Calculate(symbol, trades, lastPrice) + accountConfig := userConfig.Backtest.GetAccount(session.Exchange.Name().String()) + initBalances := accountConfig.Balances.BalanceMap() + finalBalances := session.GetAccount().Balances() + symbolReport := backtest.SessionSymbolReport{ + Exchange: session.Exchange.Name(), + Symbol: symbol, + Market: market, + LastPrice: lastPrice, + StartPrice: startPrice, + PnL: report, + InitialBalances: initBalances, + FinalBalances: finalBalances, + // Manifests: manifests, + } + + for _, s := range session.Subscriptions { + symbolReport.Subscriptions = append(symbolReport.Subscriptions, s) + } + + sessionKLineIntervals := map[types.Interval]struct{}{} + for _, sub := range session.Subscriptions { + if sub.Channel == types.KLineChannel { + sessionKLineIntervals[sub.Options.Interval] = struct{}{} + } + } + + for interval := range sessionKLineIntervals { + symbolReport.Intervals = append(symbolReport.Intervals, interval) + } + + return &symbolReport, nil +} + +func verify(userConfig *bbgo.Config, backtestService *service.BacktestService, sourceExchanges map[types.ExchangeName]types.Exchange, startTime, endTime time.Time) error { + for _, sourceExchange := range sourceExchanges { + err := backtestService.Verify(sourceExchange, userConfig.Backtest.Symbols, startTime, endTime) + if err != nil { + return err + } + } + return nil +} + +func confirmation(s string) bool { + reader := bufio.NewReader(os.Stdin) + for { + fmt.Printf("%s [y/N]: ", s) + + response, err := reader.ReadString('\n') + if err != nil { + log.Fatal(err) + } + + response = strings.ToLower(strings.TrimSpace(response)) + + if response == "y" || response == "yes" { + return true + } else if response == "n" || response == "no" { + return false + } else { + return false + } + } +} + +func toExchangeSources(sessions map[string]*bbgo.ExchangeSession, extraIntervals ...types.Interval) (exchangeSources []backtest.ExchangeDataSource, err error) { + for _, session := range sessions { + exchange := session.Exchange.(*backtest.Exchange) + exchange.UserDataStream = session.UserDataStream.(types.StandardStreamEmitter) + exchange.MarketDataStream = session.MarketDataStream.(types.StandardStreamEmitter) + exchange.InitMarketData() + + c, err := exchange.SubscribeMarketData(extraIntervals...) 
+ if err != nil { + return exchangeSources, err + } + + sessionCopy := session + exchangeSources = append(exchangeSources, backtest.ExchangeDataSource{ + C: c, + Exchange: exchange, + Session: sessionCopy, + }) + } + return exchangeSources, nil +} + +func sync(ctx context.Context, userConfig *bbgo.Config, backtestService *service.BacktestService, sourceExchanges map[types.ExchangeName]types.Exchange, syncFrom, syncTo time.Time) error { + for _, symbol := range userConfig.Backtest.Symbols { + for _, sourceExchange := range sourceExchanges { + exCustom, ok := sourceExchange.(types.CustomIntervalProvider) + + var supportIntervals map[types.Interval]int + if ok { + supportIntervals = exCustom.SupportedInterval() + } else { + supportIntervals = types.SupportedIntervals + } + + // sort intervals + var intervals []types.Interval + for interval := range supportIntervals { + intervals = append(intervals, interval) + } + sort.Slice(intervals, func(i, j int) bool { + return intervals[i].Duration() < intervals[j].Duration() + }) + + for _, interval := range intervals { + if err := backtestService.Sync(ctx, sourceExchange, symbol, interval, syncFrom, syncTo); err != nil { + return err + } + } + } + } + return nil +} + +func rewriteManifestPaths(manifests backtest.Manifests, basePath string) (backtest.Manifests, error) { + var filterManifests = backtest.Manifests{} + for k, m := range manifests { + p, err := filepath.Rel(basePath, m) + if err != nil { + return nil, err + } + filterManifests[k] = p + } + return filterManifests, nil } diff --git a/pkg/cmd/balances.go b/pkg/cmd/balances.go new file mode 100644 index 0000000000..492b49d648 --- /dev/null +++ b/pkg/cmd/balances.go @@ -0,0 +1,67 @@ +package cmd + +import ( + "context" + "fmt" + + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/bbgo" +) + +func init() { + balancesCmd.Flags().String("session", "", "the exchange session name for querying balances") + RootCmd.AddCommand(balancesCmd) +} + +// go run ./cmd/bbgo balances --session=ftx +var balancesCmd = &cobra.Command{ + Use: "balances [--session SESSION]", + Short: "Show user account balances", + SilenceUsage: true, + PreRunE: cobraInitRequired([]string{ + "session", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + environ := bbgo.NewEnvironment() + + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + if len(sessionName) > 0 { + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + b, err := session.Exchange.QueryAccountBalances(ctx) + if err != nil { + return err + } + + b.Print() + } else { + for _, session := range environ.Sessions() { + + b, err := session.Exchange.QueryAccountBalances(ctx) + if err != nil { + return err + } + + log.Infof("SESSION %s", session.Name) + b.Print() + } + } + + return nil + }, +} diff --git a/pkg/cmd/build.go b/pkg/cmd/build.go index 6740e4791e..3a8c5c22d7 100644 --- a/pkg/cmd/build.go +++ b/pkg/cmd/build.go @@ -11,7 +11,7 @@ import ( ) func init() { - BuildCmd.Flags().String("config", "", "config file") + BuildCmd.Flags().String("config", "bbgo.yaml", "config file") RootCmd.AddCommand(BuildCmd) } diff --git a/pkg/cmd/builtin.go b/pkg/cmd/builtin.go index 538ab6966a..ad2baa3bde 100644 --- a/pkg/cmd/builtin.go +++ b/pkg/cmd/builtin.go @@ -2,13 +2,36 @@ package cmd // import built-in 
strategies import ( + _ "github.com/c9s/bbgo/pkg/strategy/autoborrow" _ "github.com/c9s/bbgo/pkg/strategy/bollgrid" - _ "github.com/c9s/bbgo/pkg/strategy/buyandhold" + _ "github.com/c9s/bbgo/pkg/strategy/bollmaker" + _ "github.com/c9s/bbgo/pkg/strategy/dca" + _ "github.com/c9s/bbgo/pkg/strategy/emastop" + _ "github.com/c9s/bbgo/pkg/strategy/etf" + _ "github.com/c9s/bbgo/pkg/strategy/ewoDgtrd" + _ "github.com/c9s/bbgo/pkg/strategy/factorzoo" _ "github.com/c9s/bbgo/pkg/strategy/flashcrash" + _ "github.com/c9s/bbgo/pkg/strategy/fmaker" + _ "github.com/c9s/bbgo/pkg/strategy/funding" _ "github.com/c9s/bbgo/pkg/strategy/grid" - _ "github.com/c9s/bbgo/pkg/strategy/mirrormaker" + _ "github.com/c9s/bbgo/pkg/strategy/infinity-grid" + _ "github.com/c9s/bbgo/pkg/strategy/kline" + _ "github.com/c9s/bbgo/pkg/strategy/marketcap" + _ "github.com/c9s/bbgo/pkg/strategy/pivotshort" _ "github.com/c9s/bbgo/pkg/strategy/pricealert" + _ "github.com/c9s/bbgo/pkg/strategy/pricedrop" + _ "github.com/c9s/bbgo/pkg/strategy/rebalance" + _ "github.com/c9s/bbgo/pkg/strategy/rsmaker" + _ "github.com/c9s/bbgo/pkg/strategy/schedule" + _ "github.com/c9s/bbgo/pkg/strategy/skeleton" + _ "github.com/c9s/bbgo/pkg/strategy/supertrend" + _ "github.com/c9s/bbgo/pkg/strategy/support" _ "github.com/c9s/bbgo/pkg/strategy/swing" - _ "github.com/c9s/bbgo/pkg/strategy/trailingstop" + _ "github.com/c9s/bbgo/pkg/strategy/techsignal" + _ "github.com/c9s/bbgo/pkg/strategy/wall" + _ "github.com/c9s/bbgo/pkg/strategy/xbalance" + _ "github.com/c9s/bbgo/pkg/strategy/xgap" + _ "github.com/c9s/bbgo/pkg/strategy/xmaker" + _ "github.com/c9s/bbgo/pkg/strategy/xnav" _ "github.com/c9s/bbgo/pkg/strategy/xpuremaker" ) diff --git a/pkg/cmd/cancel.go b/pkg/cmd/cancel.go index 03cc64a410..cbab02194a 100644 --- a/pkg/cmd/cancel.go +++ b/pkg/cmd/cancel.go @@ -4,10 +4,8 @@ import ( "context" "fmt" - "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "github.com/spf13/viper" "github.com/c9s/bbgo/pkg/bbgo" "github.com/c9s/bbgo/pkg/types" @@ -20,20 +18,22 @@ type advancedOrderCancelApi interface { } func init() { - CancelCmd.Flags().String("session", "", "session to execute cancel orders") - CancelCmd.Flags().String("symbol", "", "symbol to cancel orders") - CancelCmd.Flags().Int64("group-id", 0, "groupID to cancel orders") - RootCmd.AddCommand(CancelCmd) + cancelOrderCmd.Flags().String("session", "", "session to execute cancel orders") + cancelOrderCmd.Flags().String("symbol", "", "symbol to cancel orders") + cancelOrderCmd.Flags().Int64("group-id", 0, "group ID to cancel orders") + cancelOrderCmd.Flags().Uint64("order-id", 0, "order ID to cancel orders") + cancelOrderCmd.Flags().String("order-uuid", "", "order UUID to cancel orders") + cancelOrderCmd.Flags().Bool("all", false, "cancel all orders") + RootCmd.AddCommand(cancelOrderCmd) } -var CancelCmd = &cobra.Command{ - Use: "cancel", +var cancelOrderCmd = &cobra.Command{ + Use: "cancel-order", Short: "cancel orders", Long: "this command can cancel orders from exchange", // SilenceUsage is an option to silence usage when an error occurs. 
SilenceUsage: true, - RunE: func(cmd *cobra.Command, args []string) error { ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -48,79 +48,105 @@ var CancelCmd = &cobra.Command{ return err } - configFile, err := cmd.Flags().GetString("config") + orderID, err := cmd.Flags().GetUint64("order-id") if err != nil { return err } - if len(configFile) == 0 { - return errors.New("--config option is required") + orderUUID, err := cmd.Flags().GetString("order-uuid") + if err != nil { + return err } - userConfig, err := bbgo.Load(configFile, false) + all, err := cmd.Flags().GetBool("all") if err != nil { return err } - environ := bbgo.NewEnvironment() - - if viper.IsSet("mysql-url") { - db, err := bbgo.ConnectMySQL(viper.GetString("mysql-url")) - if err != nil { - return err - } - environ.SetDB(db) + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err } - if err := environ.AddExchangesFromConfig(userConfig); err != nil { + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { return err } - if userConfig.Persistence != nil { - if err := environ.ConfigurePersistence(userConfig.Persistence); err != nil { - return err - } + if err := environ.Init(ctx); err != nil { + return err } var sessions = environ.Sessions() - if n, err := cmd.Flags().GetString("session"); err == nil && len(n) > 0 { - ses, ok := sessions[n] + if len(sessionName) > 0 { + ses, ok := environ.Session(sessionName) if !ok { - return fmt.Errorf("session %s not found", n) + return fmt.Errorf("session %s not found", sessionName) } - sessions = map[string]*bbgo.ExchangeSession{n: ses} - } - - for sessionID, session := range sessions { - var log = logrus.WithField("session", sessionID) - - e, ok := session.Exchange.(advancedOrderCancelApi) - if ok && groupID > 0 { - log.Infof("canceling orders by group id: %d", groupID) + if orderID > 0 || orderUUID != "" { + if orderID > 0 { + logrus.Infof("canceling order by the given order id %d", orderID) + } else if orderUUID != "" { + logrus.Infof("canceling order by the given order uuid %s", orderUUID) + } - orders, err := e.CancelOrdersByGroupID(ctx, groupID) + err := ses.Exchange.CancelOrders(ctx, types.Order{ + SubmitOrder: types.SubmitOrder{ + Symbol: symbol, + }, + OrderID: orderID, + UUID: orderUUID, + }) if err != nil { return err } + return nil + } - for _, o := range orders { - log.Info("CANCELED ", o.String()) - } - - } else if ok && len(symbol) > 0 { - log.Infof("canceling orders by symbol: %s", symbol) + sessions = map[string]*bbgo.ExchangeSession{sessionName: ses} + } - orders, err := e.CancelOrdersBySymbol(ctx, symbol) - if err != nil { - return err - } + for sessionID, session := range sessions { + var log = logrus.WithField("session", sessionID) - for _, o := range orders { - log.Info("CANCELED ", o.String()) + e, ok := session.Exchange.(advancedOrderCancelApi) + if ok { + if all { + log.Infof("canceling all orders") + + orders, err := e.CancelAllOrders(ctx) + if err != nil { + return err + } + + for _, o := range orders { + log.Info("CANCELED ", o.String()) + } + } else if groupID > 0 { + log.Infof("canceling orders by group id: %d", groupID) + + orders, err := e.CancelOrdersByGroupID(ctx, groupID) + if err != nil { + return err + } + + for _, o := range orders { + log.Info("CANCELED ", o.String()) + } + } else if len(symbol) > 0 { + log.Infof("canceling orders by symbol: %s", symbol) + + orders, err := e.CancelOrdersBySymbol(ctx, symbol) + if err != nil { + return err + } + + for _, 
o := range orders { + log.Info("CANCELED ", o.String()) + } } - } else if len(symbol) > 0 { openOrders, err := session.Exchange.QueryOpenOrders(ctx, symbol) if err != nil { diff --git a/pkg/cmd/cmdutil/exchange.go b/pkg/cmd/cmdutil/exchange.go index cadb68ec34..b6eaaab516 100644 --- a/pkg/cmd/cmdutil/exchange.go +++ b/pkg/cmd/cmdutil/exchange.go @@ -1,48 +1 @@ package cmdutil - -import ( - "fmt" - - "github.com/pkg/errors" - "github.com/spf13/viper" - - "github.com/c9s/bbgo/pkg/exchange/binance" - "github.com/c9s/bbgo/pkg/exchange/max" - "github.com/c9s/bbgo/pkg/types" -) - -func NewExchangeWithEnvVarPrefix(n types.ExchangeName, varPrefix string) (types.Exchange, error) { - if len(varPrefix) == 0 { - varPrefix = n.String() - } - - switch n { - - case types.ExchangeBinance: - key := viper.GetString(varPrefix + "-api-key") - secret := viper.GetString(varPrefix + "-api-secret") - if len(key) == 0 || len(secret) == 0 { - return nil, errors.New("binance: empty key or secret") - } - - return binance.New(key, secret), nil - - case types.ExchangeMax: - key := viper.GetString(varPrefix + "-api-key") - secret := viper.GetString(varPrefix + "-api-secret") - if len(key) == 0 || len(secret) == 0 { - return nil, errors.New("max: empty key or secret") - } - - return max.New(key, secret), nil - - default: - return nil, fmt.Errorf("unsupported exchange: %v", n) - - } -} - -// NewExchange constructor exchange object from viper config. -func NewExchange(n types.ExchangeName) (types.Exchange, error) { - return NewExchangeWithEnvVarPrefix(n, "") -} diff --git a/pkg/cmd/deposit.go b/pkg/cmd/deposit.go new file mode 100644 index 0000000000..a43b12acfb --- /dev/null +++ b/pkg/cmd/deposit.go @@ -0,0 +1,70 @@ +package cmd + +import ( + "context" + "fmt" + "time" + + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +func init() { + depositsCmd.Flags().String("session", "", "the exchange session name for querying balances") + depositsCmd.Flags().String("asset", "", "the trading pair, like btcusdt") + RootCmd.AddCommand(depositsCmd) +} + +// go run ./cmd/bbgo deposits --session=ftx --asset="BTC" +// This is a testing util and will query deposits in last 7 days. 
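+// Both --session and --asset are required, and the exchange behind the session
+// must implement types.ExchangeTransferService; the query window is fixed to the
+// last 7 days. Example (assuming a session named "binance" exists in your config):
+//
+//	go run ./cmd/bbgo deposits --session=binance --asset=BTC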
+var depositsCmd = &cobra.Command{ + Use: "deposits", + Short: "A testing utility that will query deposition history in last 7 days", + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + asset, err := cmd.Flags().GetString("asset") + if err != nil { + return fmt.Errorf("can't get the asset from flags: %w", err) + } + if asset == "" { + return fmt.Errorf("asset is not found") + } + + until := time.Now() + since := until.Add(-7 * 24 * time.Hour) + exchange, ok := session.Exchange.(types.ExchangeTransferService) + if !ok { + return fmt.Errorf("exchange session %s does not implement transfer service", sessionName) + } + histories, err := exchange.QueryDepositHistory(ctx, asset, since, until) + if err != nil { + return err + } + + log.Infof("%d histories", len(histories)) + for _, h := range histories { + log.Infof("deposit history: %+v", h) + } + return nil + }, +} diff --git a/pkg/cmd/kline.go b/pkg/cmd/kline.go new file mode 100644 index 0000000000..7a346d0a30 --- /dev/null +++ b/pkg/cmd/kline.go @@ -0,0 +1,93 @@ +package cmd + +import ( + "context" + "fmt" + "syscall" + + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/cmd/cmdutil" + "github.com/c9s/bbgo/pkg/types" +) + +// go run ./cmd/bbgo kline --exchange=ftx --symbol=BTCUSDT +var klineCmd = &cobra.Command{ + Use: "kline", + Short: "connect to the kline market data streaming service of an exchange", + PreRunE: cobraInitRequired([]string{ + "session", + "symbol", + "interval", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return fmt.Errorf("can not get the symbol from flags: %w", err) + } + + if symbol == "" { + return fmt.Errorf("--symbol option is required") + } + + interval, err := cmd.Flags().GetString("interval") + if err != nil { + return err + } + + s := session.Exchange.NewStream() + s.SetPublicOnly() + s.Subscribe(types.KLineChannel, symbol, types.SubscribeOptions{Interval: types.Interval(interval)}) + + s.OnKLineClosed(func(kline types.KLine) { + log.Infof("kline closed: %s", kline.String()) + }) + + s.OnKLine(func(kline types.KLine) { + log.Infof("kline: %s", kline.String()) + }) + + log.Infof("connecting...") + if err := s.Connect(ctx); err != nil { + return err + } + + log.Infof("connected") + defer func() { + log.Infof("closing connection...") + if err := s.Close(); err != nil { + log.WithError(err).Errorf("connection close error") + } + }() + + cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) + return nil + }, +} + +func init() { + // since the public data does not require trading authentication, we use --exchange option here. 
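+	// Example usage (the session name comes from your bbgo config; the interval
+	// defaults to 1m):
+	//
+	//	go run ./cmd/bbgo kline --session=binance --symbol=BTCUSDT --interval=1m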
+ klineCmd.Flags().String("session", "", "session name") + klineCmd.Flags().String("symbol", "", "the trading pair. e.g, BTCUSDT, LTCUSDT...") + klineCmd.Flags().String("interval", "1m", "interval of the kline (candle), .e.g, 1m, 3m, 15m") + RootCmd.AddCommand(klineCmd) +} diff --git a/pkg/cmd/margin.go b/pkg/cmd/margin.go new file mode 100644 index 0000000000..1ad608f90d --- /dev/null +++ b/pkg/cmd/margin.go @@ -0,0 +1,189 @@ +package cmd + +import ( + "context" + "errors" + "fmt" + "time" + + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +var selectedSession *bbgo.ExchangeSession + +func init() { + marginLoansCmd.Flags().String("asset", "", "asset") + marginLoansCmd.Flags().String("session", "", "exchange session name") + marginCmd.AddCommand(marginLoansCmd) + + marginRepaysCmd.Flags().String("asset", "", "asset") + marginRepaysCmd.Flags().String("session", "", "exchange session name") + marginCmd.AddCommand(marginRepaysCmd) + + marginInterestsCmd.Flags().String("asset", "", "asset") + marginInterestsCmd.Flags().String("session", "", "exchange session name") + marginCmd.AddCommand(marginInterestsCmd) + + RootCmd.AddCommand(marginCmd) +} + +// go run ./cmd/bbgo margin --session=binance +var marginCmd = &cobra.Command{ + Use: "margin", + Short: "margin related history", + SilenceUsage: true, + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + if err := cobraLoadDotenv(cmd, args); err != nil { + return err + } + + if err := cobraLoadConfig(cmd, args); err != nil { + return err + } + + // ctx := context.Background() + environ := bbgo.NewEnvironment() + + if userConfig == nil { + return errors.New("user config is not loaded") + } + + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + selectedSession = session + return nil + }, +} + +// go run ./cmd/bbgo margin loans --session=binance +var marginLoansCmd = &cobra.Command{ + Use: "loans --session=SESSION_NAME --asset=ASSET", + Short: "query loans history", + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + asset, err := cmd.Flags().GetString("asset") + if err != nil { + return err + } + + if selectedSession == nil { + return errors.New("session is not set") + } + + marginHistoryService, ok := selectedSession.Exchange.(types.MarginHistory) + if !ok { + return fmt.Errorf("exchange %s does not support MarginHistory service", selectedSession.ExchangeName) + } + + now := time.Now() + startTime := now.AddDate(0, -5, 0) + endTime := now + loans, err := marginHistoryService.QueryLoanHistory(ctx, asset, &startTime, &endTime) + if err != nil { + return err + } + + log.Infof("%d loans", len(loans)) + for _, loan := range loans { + log.Infof("LOAN %+v", loan) + } + + return nil + }, +} + +// go run ./cmd/bbgo margin loans --session=binance +var marginRepaysCmd = &cobra.Command{ + Use: "repays --session=SESSION_NAME --asset=ASSET", + Short: "query repay history", + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + asset, err := cmd.Flags().GetString("asset") + if err != nil { + return err + } + + if selectedSession == nil { + return errors.New("session is not set") + } + + 
marginHistoryService, ok := selectedSession.Exchange.(types.MarginHistory)
+		if !ok {
+			return fmt.Errorf("exchange %s does not support MarginHistory service", selectedSession.ExchangeName)
+		}
+
+		now := time.Now()
+		startTime := now.AddDate(0, -5, 0)
+		endTime := now
+		repays, err := marginHistoryService.QueryRepayHistory(ctx, asset, &startTime, &endTime)
+		if err != nil {
+			return err
+		}
+
+		log.Infof("%d repays", len(repays))
+		for _, repay := range repays {
+			log.Infof("REPAY %+v", repay)
+		}
+
+		return nil
+	},
+}
+
+// go run ./cmd/bbgo margin interests --session=binance
+var marginInterestsCmd = &cobra.Command{
+	Use:          "interests --session=SESSION_NAME --asset=ASSET",
+	Short:        "query interests history",
+	SilenceUsage: true,
+	RunE: func(cmd *cobra.Command, args []string) error {
+		ctx := context.Background()
+
+		asset, err := cmd.Flags().GetString("asset")
+		if err != nil {
+			return fmt.Errorf("can't get the asset from flags: %w", err)
+		}
+
+		if selectedSession == nil {
+			return errors.New("session is not set")
+		}
+
+		marginHistoryService, ok := selectedSession.Exchange.(types.MarginHistory)
+		if !ok {
+			return fmt.Errorf("exchange %s does not support MarginHistory service", selectedSession.ExchangeName)
+		}
+
+		now := time.Now()
+		startTime := now.AddDate(0, -5, 0)
+		endTime := now
+		interests, err := marginHistoryService.QueryInterestHistory(ctx, asset, &startTime, &endTime)
+		if err != nil {
+			return err
+		}
+
+		log.Infof("%d interests", len(interests))
+		for _, interest := range interests {
+			log.Infof("INTEREST %+v", interest)
+		}
+
+		return nil
+	},
+}
diff --git a/pkg/cmd/market.go b/pkg/cmd/market.go
new file mode 100644
index 0000000000..794f1bba63
--- /dev/null
+++ b/pkg/cmd/market.go
@@ -0,0 +1,75 @@
+package cmd
+
+import (
+	"context"
+	"fmt"
+	"os"
+
+	"github.com/pkg/errors"
+	log "github.com/sirupsen/logrus"
+	"github.com/spf13/cobra"
+
+	"github.com/c9s/bbgo/pkg/bbgo"
+)
+
+func init() {
+	marketCmd.Flags().String("session", "", "the exchange session name for querying information")
+	RootCmd.AddCommand(marketCmd)
+}
+
+// go run ./cmd/bbgo market --session=ftx --config=config/bbgo.yaml
+var marketCmd = &cobra.Command{
+	Use:          "market",
+	Short:        "List the symbols that are available to be traded in the exchange",
+	SilenceUsage: true,
+	RunE: func(cmd *cobra.Command, args []string) error {
+		ctx := context.Background()
+
+		configFile, err := cmd.Flags().GetString("config")
+		if err != nil {
+			return err
+		}
+
+		if len(configFile) == 0 {
+			return errors.New("--config option is required")
+		}
+
+		if _, err := os.Stat(configFile); os.IsNotExist(err) {
+			return err
+		}
+
+		userConfig, err := bbgo.Load(configFile, false)
+		if err != nil {
+			return err
+		}
+
+		environ := bbgo.NewEnvironment()
+		if err := environ.ConfigureDatabase(ctx); err != nil {
+			return err
+		}
+
+		if err := environ.ConfigureExchangeSessions(userConfig); err != nil {
+			return err
+		}
+
+		sessionName, err := cmd.Flags().GetString("session")
+		if err != nil {
+			return err
+		}
+
+		session, ok := environ.Session(sessionName)
+		if !ok {
+			return fmt.Errorf("session %s not found", sessionName)
+		}
+
+		markets, err := session.Exchange.QueryMarkets(ctx)
+		if err != nil {
+			return err
+		}
+
+		for _, m := range markets {
+			log.Infof("market: %+v", m)
+		}
+		return nil
+	},
+}
diff --git a/pkg/cmd/optimize.go b/pkg/cmd/optimize.go
new file mode 100644
index 0000000000..27ed89f553
--- /dev/null
+++ b/pkg/cmd/optimize.go
@@ -0,0 +1,122 @@
+package cmd
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	
"io/ioutil" + "os" + + "github.com/spf13/cobra" + "gopkg.in/yaml.v3" + + "github.com/c9s/bbgo/pkg/optimizer" +) + +func init() { + optimizeCmd.Flags().String("optimizer-config", "optimizer.yaml", "config file") + optimizeCmd.Flags().String("output", "output", "backtest report output directory") + optimizeCmd.Flags().Bool("json", false, "print optimizer metrics in json format") + RootCmd.AddCommand(optimizeCmd) +} + +var optimizeCmd = &cobra.Command{ + Use: "optimize", + Short: "run optimizer", + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage: true, + + RunE: func(cmd *cobra.Command, args []string) error { + optimizerConfigFilename, err := cmd.Flags().GetString("optimizer-config") + if err != nil { + return err + } + + configFile, err := cmd.Flags().GetString("config") + if err != nil { + return err + } + + printJsonFormat, err := cmd.Flags().GetBool("json") + if err != nil { + return err + } + + outputDirectory, err := cmd.Flags().GetString("output") + if err != nil { + return err + } + + yamlBody, err := ioutil.ReadFile(configFile) + if err != nil { + return err + } + var obj map[string]interface{} + if err := yaml.Unmarshal(yamlBody, &obj); err != nil { + return err + } + delete(obj, "notifications") + delete(obj, "sync") + + optConfig, err := optimizer.LoadConfig(optimizerConfigFilename) + if err != nil { + return err + } + + // the config json template used for patch + configJson, err := json.MarshalIndent(obj, "", " ") + if err != nil { + return err + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + _ = ctx + + configDir, err := os.MkdirTemp("", "bbgo-config-*") + if err != nil { + return err + } + + executor := &optimizer.LocalProcessExecutor{ + Config: optConfig.Executor.LocalExecutorConfig, + Bin: os.Args[0], + WorkDir: ".", + ConfigDir: configDir, + OutputDir: outputDirectory, + } + + optz := &optimizer.GridOptimizer{ + Config: optConfig, + } + + metrics, err := optz.Run(executor, configJson) + if err != nil { + return err + } + + if printJsonFormat { + out, err := json.MarshalIndent(metrics, "", " ") + if err != nil { + return err + } + + // print metrics JSON to stdout + fmt.Println(string(out)) + } else { + for n, values := range metrics { + if len(values) == 0 { + continue + } + + fmt.Printf("%v => %s\n", values[0].Labels, n) + for _, m := range values { + fmt.Printf("%v => %s %v\n", m.Params, n, m.Value) + } + } + } + + return nil + }, +} diff --git a/pkg/cmd/orderbook.go b/pkg/cmd/orderbook.go new file mode 100644 index 0000000000..bfe9323441 --- /dev/null +++ b/pkg/cmd/orderbook.go @@ -0,0 +1,170 @@ +package cmd + +import ( + "context" + "fmt" + "syscall" + "time" + + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/cmd/cmdutil" + "github.com/c9s/bbgo/pkg/types" +) + +// go run ./cmd/bbgo orderbook --session=ftx --symbol=BTCUSDT +var orderbookCmd = &cobra.Command{ + Use: "orderbook --session=[exchange_name] --symbol=[pair_name]", + Short: "connect to the order book market data streaming service of an exchange", + PreRunE: cobraInitRequired([]string{ + "session", + "symbol", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return fmt.Errorf("can not get the symbol from flags: %w", err) + } + + if symbol == "" { + return 
fmt.Errorf("--symbol option is required") + } + + dumpDepthUpdate, err := cmd.Flags().GetBool("dump-update") + if err != nil { + return err + } + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + orderBook := types.NewMutexOrderBook(symbol) + + s := session.Exchange.NewStream() + s.SetPublicOnly() + s.Subscribe(types.BookChannel, symbol, types.SubscribeOptions{}) + s.OnBookSnapshot(func(book types.SliceOrderBook) { + if dumpDepthUpdate { + log.Infof("orderbook snapshot: %s", book.String()) + } + + orderBook.Load(book) + + if ok, err := orderBook.IsValid(); !ok { + log.WithError(err).Panicf("invalid error book snapshot") + } + + if bid, ask, ok := orderBook.BestBidAndAsk(); ok { + log.Infof("ASK | %f x %f / %f x %f | BID", + ask.Volume.Float64(), ask.Price.Float64(), + bid.Price.Float64(), bid.Volume.Float64()) + } + }) + + s.OnBookUpdate(func(book types.SliceOrderBook) { + if dumpDepthUpdate { + log.Infof("orderbook update: %s", book.String()) + } + orderBook.Update(book) + + if bid, ask, ok := orderBook.BestBidAndAsk(); ok { + log.Infof("ASK | %f x %f / %f x %f | BID", + ask.Volume.Float64(), ask.Price.Float64(), + bid.Price.Float64(), bid.Volume.Float64()) + } + }) + + log.Infof("connecting...") + if err := s.Connect(ctx); err != nil { + return fmt.Errorf("failed to connect to %s", sessionName) + } + + log.Infof("connected") + defer func() { + log.Infof("closing connection...") + if err := s.Close(); err != nil { + log.WithError(err).Errorf("connection close error") + } + time.Sleep(1 * time.Second) + }() + + cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) + return nil + }, +} + +// go run ./cmd/bbgo orderupdate --session=ftx +var orderUpdateCmd = &cobra.Command{ + Use: "orderupdate", + Short: "Listen to order update events", + PreRunE: cobraInitRequired([]string{ + "config", + "session", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + s := session.Exchange.NewStream() + s.OnOrderUpdate(func(order types.Order) { + log.Infof("order update: %+v", order) + }) + + log.Infof("connecting...") + if err := s.Connect(ctx); err != nil { + return fmt.Errorf("failed to connect to %s", sessionName) + } + + log.Infof("connected") + defer func() { + log.Infof("closing connection...") + if err := s.Close(); err != nil { + log.WithError(err).Errorf("connection close error") + } + time.Sleep(1 * time.Second) + }() + + cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) + return nil + }, +} + +func init() { + orderbookCmd.Flags().String("session", "", "session name") + orderbookCmd.Flags().String("symbol", "", "the trading pair. 
e.g, BTCUSDT, LTCUSDT...") + orderbookCmd.Flags().Bool("dump-update", false, "dump the depth update") + + orderUpdateCmd.Flags().String("session", "", "session name") + RootCmd.AddCommand(orderbookCmd) + RootCmd.AddCommand(orderUpdateCmd) +} diff --git a/pkg/cmd/orders.go b/pkg/cmd/orders.go new file mode 100644 index 0000000000..c75e8485de --- /dev/null +++ b/pkg/cmd/orders.go @@ -0,0 +1,404 @@ +package cmd + +import ( + "context" + "fmt" + "os" + "os/signal" + "strings" + "syscall" + "time" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/fixedpoint" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +var getOrderCmd = &cobra.Command{ + Use: "get-order --session SESSION --order-id ORDER_ID", + Short: "Get order status", + SilenceUsage: true, + PreRunE: cobraInitRequired([]string{ + "order-id", + "symbol", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + orderID, err := cmd.Flags().GetString("order-id") + if err != nil { + return fmt.Errorf("can't get the symbol from flags: %w", err) + } + + service, ok := session.Exchange.(types.ExchangeOrderQueryService) + if !ok { + return fmt.Errorf("query order status is not supported for exchange %T, interface types.ExchangeOrderQueryService is not implemented", session.Exchange) + } + + order, err := service.QueryOrder(ctx, types.OrderQuery{ + OrderID: orderID, + }) + if err != nil { + return err + } + + log.Infof("%+v", order) + + return nil + }, +} + +// go run ./cmd/bbgo list-orders [open|closed] --session=ftx --symbol=BTCUSDT +var listOrdersCmd = &cobra.Command{ + Use: "list-orders open|closed --session SESSION --symbol SYMBOL", + Short: "list user's open orders in exchange of a specific trading pair", + Args: cobra.OnlyValidArgs, + // default is open which means we query open orders if you haven't provided args. 
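+	// Running `list-orders --session=... --symbol=...` therefore lists open orders,
+	// while `list-orders closed ...` queries the closed orders of the last 3 days
+	// through the ExchangeTradeHistoryService interface.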
+ ValidArgs: []string{"", "open", "closed"}, + SilenceUsage: true, + PreRunE: cobraInitRequired([]string{ + "session", + "symbol", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return fmt.Errorf("can't get the symbol from flags: %w", err) + } + if symbol == "" { + return fmt.Errorf("symbol is not found") + } + + status := "open" + if len(args) != 0 { + status = args[0] + } + + var os []types.Order + switch status { + case "open": + os, err = session.Exchange.QueryOpenOrders(ctx, symbol) + if err != nil { + return err + } + case "closed": + tradeHistoryService, ok := session.Exchange.(types.ExchangeTradeHistoryService) + if !ok { + // skip exchanges that does not support trading history services + log.Warnf("exchange %s does not implement ExchangeTradeHistoryService, skip syncing closed orders (listOrdersCmd)", session.Exchange.Name()) + return nil + } + + os, err = tradeHistoryService.QueryClosedOrders(ctx, symbol, time.Now().Add(-3*24*time.Hour), time.Now(), 0) + if err != nil { + return err + } + default: + return fmt.Errorf("invalid status %s", status) + } + + log.Infof("%s ORDERS FROM %s SESSION", strings.ToUpper(status), session.Name) + for _, o := range os { + log.Infof("%+v", o) + } + + return nil + }, +} + +var executeOrderCmd = &cobra.Command{ + Use: "execute-order --session SESSION --symbol SYMBOL --side SIDE --target-quantity TOTAL_QUANTITY --slice-quantity SLICE_QUANTITY", + Short: "execute buy/sell on the balance/position you have on specific symbol", + SilenceUsage: true, + PreRunE: cobraInitRequired([]string{ + "symbol", + "side", + "target-quantity", + "slice-quantity", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return fmt.Errorf("can not get the symbol from flags: %w", err) + } + + if symbol == "" { + return fmt.Errorf("symbol not found") + } + + sideS, err := cmd.Flags().GetString("side") + if err != nil { + return fmt.Errorf("can't get side: %w", err) + } + + side, err := types.StrToSideType(sideS) + if err != nil { + return err + } + + targetQuantityS, err := cmd.Flags().GetString("target-quantity") + if err != nil { + return err + } + if len(targetQuantityS) == 0 { + return errors.New("--target-quantity can not be empty") + } + + targetQuantity, err := fixedpoint.NewFromString(targetQuantityS) + if err != nil { + return err + } + + sliceQuantityS, err := cmd.Flags().GetString("slice-quantity") + if err != nil { + return err + } + if len(sliceQuantityS) == 0 { + return errors.New("--slice-quantity can not be empty") + } + + sliceQuantity, err := fixedpoint.NewFromString(sliceQuantityS) + if err != nil { + return err + } + + numOfPriceTicks, err := cmd.Flags().GetInt("price-ticks") + if err != nil { + return err + } + + stopPriceS, err := cmd.Flags().GetString("stop-price") + if err != nil { + return err + } + + stopPrice, err := fixedpoint.NewFromString(stopPriceS) + if err != nil { + return err + } + + 
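+		// --update-interval is passed to the TWAP execution as its order update period,
+		// and a non-zero --deadline is converted into an absolute DeadlineTime below.
+		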
updateInterval, err := cmd.Flags().GetDuration("update-interval") + if err != nil { + return err + } + + deadlineDuration, err := cmd.Flags().GetDuration("deadline") + if err != nil { + return err + } + + var deadlineTime time.Time + if deadlineDuration > 0 { + deadlineTime = time.Now().Add(deadlineDuration) + } + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + if err := environ.Init(ctx); err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + executionCtx, cancelExecution := context.WithCancel(ctx) + defer cancelExecution() + + execution := &bbgo.TwapExecution{ + Session: session, + Symbol: symbol, + Side: side, + TargetQuantity: targetQuantity, + SliceQuantity: sliceQuantity, + StopPrice: stopPrice, + NumOfTicks: numOfPriceTicks, + UpdateInterval: updateInterval, + DeadlineTime: deadlineTime, + } + + if err := execution.Run(executionCtx); err != nil { + return err + } + + var sigC = make(chan os.Signal, 1) + signal.Notify(sigC, syscall.SIGINT, syscall.SIGTERM) + defer signal.Stop(sigC) + + select { + case sig := <-sigC: + log.Warnf("signal %v", sig) + log.Infof("shutting down order executor...") + shutdownCtx, cancelShutdown := context.WithDeadline(ctx, time.Now().Add(10*time.Second)) + execution.Shutdown(shutdownCtx) + cancelShutdown() + + case <-execution.Done(): + log.Infof("the order execution is completed") + + case <-ctx.Done(): + + } + + return nil + }, +} + +// go run ./cmd/bbgo submit-order --session=ftx --symbol=BTCUSDT --side=buy --price=18000 --quantity=0.001 +var submitOrderCmd = &cobra.Command{ + Use: "submit-order --session SESSION --symbol SYMBOL --side SIDE --quantity QUANTITY [--price PRICE]", + Short: "place order to the exchange", + SilenceUsage: true, + PreRunE: cobraInitRequired([]string{ + "session", + "symbol", + "side", + "price", + "quantity", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return fmt.Errorf("can't get the symbol from flags: %w", err) + } + if symbol == "" { + return fmt.Errorf("symbol is not found") + } + + side, err := cmd.Flags().GetString("side") + if err != nil { + return fmt.Errorf("can not get side: %w", err) + } + + price, err := cmd.Flags().GetString("price") + if err != nil { + return fmt.Errorf("can not get price: %w", err) + } + + quantity, err := cmd.Flags().GetString("quantity") + if err != nil { + return fmt.Errorf("can not get quantity: %w", err) + } + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + if err := environ.Init(ctx); err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + market, ok := session.Market(symbol) + if !ok { + return fmt.Errorf("market definition %s not found", symbol) + } + + so := types.SubmitOrder{ + Symbol: symbol, + Side: types.SideType(strings.ToUpper(side)), + Type: types.OrderTypeLimit, + Quantity: fixedpoint.MustNewFromString(quantity), + Price: fixedpoint.MustNewFromString(price), + Market: market, + TimeInForce: "GTC", + } + + co, err := session.Exchange.SubmitOrders(ctx, so) + if err != nil { + return err + } + + log.Infof("submitted order: 
%+v\ncreated order: %+v", so, co[0]) + return nil + }, +} + +func init() { + listOrdersCmd.Flags().String("session", "", "the exchange session name for sync") + listOrdersCmd.Flags().String("symbol", "", "the trading pair, like btcusdt") + + getOrderCmd.Flags().String("session", "", "the exchange session name for sync") + getOrderCmd.Flags().String("symbol", "", "the trading pair, like btcusdt") + getOrderCmd.Flags().String("order-id", "", "order id") + + submitOrderCmd.Flags().String("session", "", "the exchange session name for sync") + submitOrderCmd.Flags().String("symbol", "", "the trading pair, like btcusdt") + submitOrderCmd.Flags().String("side", "", "the trading side: buy or sell") + submitOrderCmd.Flags().String("price", "", "the trading price") + submitOrderCmd.Flags().String("quantity", "", "the trading quantity") + + executeOrderCmd.Flags().String("session", "", "the exchange session name for sync") + executeOrderCmd.Flags().String("symbol", "", "the trading pair, like btcusdt") + executeOrderCmd.Flags().String("side", "", "the trading side: buy or sell") + executeOrderCmd.Flags().String("target-quantity", "", "target quantity") + executeOrderCmd.Flags().String("slice-quantity", "", "slice quantity") + executeOrderCmd.Flags().String("stop-price", "0", "stop price") + executeOrderCmd.Flags().Duration("update-interval", time.Second*10, "order update time") + executeOrderCmd.Flags().Duration("deadline", 0, "deadline of the order execution") + executeOrderCmd.Flags().Int("price-ticks", 0, "the number of price tick for the jump spread, default to 0") + + RootCmd.AddCommand(listOrdersCmd) + RootCmd.AddCommand(getOrderCmd) + RootCmd.AddCommand(submitOrderCmd) + RootCmd.AddCommand(executeOrderCmd) +} diff --git a/pkg/cmd/pnl.go b/pkg/cmd/pnl.go index 3eb9c01154..ccb0362a91 100644 --- a/pkg/cmd/pnl.go +++ b/pkg/cmd/pnl.go @@ -2,107 +2,168 @@ package cmd import ( "context" + "errors" + "fmt" + "sort" "strings" "time" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "github.com/spf13/viper" - "github.com/c9s/bbgo/pkg/accounting" "github.com/c9s/bbgo/pkg/accounting/pnl" "github.com/c9s/bbgo/pkg/bbgo" - "github.com/c9s/bbgo/pkg/cmd/cmdutil" "github.com/c9s/bbgo/pkg/service" "github.com/c9s/bbgo/pkg/types" ) func init() { - PnLCmd.Flags().String("exchange", "", "target exchange") - PnLCmd.Flags().String("symbol", "BTCUSDT", "trading symbol") + PnLCmd.Flags().String("session", "", "target exchange") + PnLCmd.Flags().String("symbol", "", "trading symbol") + PnLCmd.Flags().Bool("include-transfer", false, "convert transfer records into trades") + PnLCmd.Flags().Int("limit", 0, "number of trades") RootCmd.AddCommand(PnLCmd) } var PnLCmd = &cobra.Command{ Use: "pnl", - Short: "pnl calculator", + Short: "Average Cost Based PnL Calculator", + Long: "This command calculates the average cost-based profit from your total trades", SilenceUsage: true, RunE: func(cmd *cobra.Command, args []string) error { ctx := context.Background() - exchangeNameStr, err := cmd.Flags().GetString("exchange") + sessionName, err := cmd.Flags().GetString("session") if err != nil { return err } - exchangeName, err := types.ValidExchangeName(exchangeNameStr) + symbol, err := cmd.Flags().GetString("symbol") if err != nil { return err } - symbol, err := cmd.Flags().GetString("symbol") + if len(symbol) == 0 { + return errors.New("--symbol [SYMBOL] is required") + } + + limit, err := cmd.Flags().GetInt("limit") if err != nil { return err } - exchange, err := cmdutil.NewExchange(exchangeName) - 
if err != nil { + environ := bbgo.NewEnvironment() + + if err := environ.ConfigureDatabase(ctx); err != nil { + return err + } + + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + if err := environ.SyncSession(ctx, session); err != nil { return err } - db, err := bbgo.ConnectMySQL(viper.GetString("mysql-url")) + if err = environ.Init(ctx); err != nil { + return err + } + + exchange := session.Exchange + + market, ok := session.Market(symbol) + if !ok { + return fmt.Errorf("market config %s not found", symbol) + } + + since := time.Now().AddDate(-1, 0, 0) + until := time.Now() + + includeTransfer, err := cmd.Flags().GetBool("include-transfer") if err != nil { return err } - tradeService := &service.TradeService{DB: db} + if includeTransfer { + transferService, ok := exchange.(types.ExchangeTransferService) + if !ok { + return fmt.Errorf("session exchange %s does not implement transfer service", sessionName) + } + + deposits, err := transferService.QueryDepositHistory(ctx, market.BaseCurrency, since, until) + if err != nil { + return err + } + _ = deposits + + withdrawals, err := transferService.QueryWithdrawHistory(ctx, market.BaseCurrency, since, until) + if err != nil { + return err + } + + sort.Slice(withdrawals, func(i, j int) bool { + a := withdrawals[i].ApplyTime.Time() + b := withdrawals[j].ApplyTime.Time() + return a.Before(b) + }) + + // we need the backtest klines for the daily prices + backtestService := &service.BacktestService{DB: environ.DatabaseService.DB} + if err := backtestService.Sync(ctx, exchange, symbol, types.Interval1d, since, until); err != nil { + return err + } + } var trades []types.Trade tradingFeeCurrency := exchange.PlatformFeeCurrency() if strings.HasPrefix(symbol, tradingFeeCurrency) { log.Infof("loading all trading fee currency related trades: %s", symbol) - trades, err = tradeService.QueryForTradingFeeCurrency(exchange.Name(), symbol, tradingFeeCurrency) + trades, err = environ.TradeService.QueryForTradingFeeCurrency(exchange.Name(), symbol, tradingFeeCurrency) } else { - trades, err = tradeService.Query(exchange.Name(), symbol) + trades, err = environ.TradeService.Query(service.QueryTradesOptions{ + Symbol: symbol, + Limit: limit, + }) } if err != nil { return err } - log.Infof("%d trades loaded", len(trades)) - - stockManager := &accounting.StockDistribution{ - Symbol: symbol, - TradingFeeCurrency: tradingFeeCurrency, + if len(trades) == 0 { + return errors.New("empty trades, you need to run sync command to sync the trades from the exchange first") } - checkpoints, err := stockManager.AddTrades(trades) + trades = types.SortTradesAscending(trades) + + log.Infof("%d trades loaded", len(trades)) + + tickers, err := exchange.QueryTickers(ctx, symbol) if err != nil { return err } - log.Infof("found checkpoints: %+v", checkpoints) - log.Infof("stock: %f", stockManager.Stocks.Quantity()) - - now := time.Now() - kLines, err := exchange.QueryKLines(ctx, symbol, types.Interval1m, types.KLineQueryOptions{ - Limit: 100, - EndTime: &now, - }) - - if len(kLines) == 0 { - return errors.New("no kline data for current price") + currentTick, ok := tickers[symbol] + if !ok { + return errors.New("no ticker data for current price") } - currentPrice := kLines[len(kLines)-1].Close + currentPrice := currentTick.Last calculator := &pnl.AverageCostCalculator{ TradingFeeCurrency: tradingFeeCurrency, } report := 
calculator.Calculate(symbol, trades, currentPrice) report.Print() + + log.Warnf("note that if you're using cross-exchange arbitrage, the PnL won't be accurate") + log.Warnf("withdrawal and deposits are not considered in the PnL") return nil }, } diff --git a/pkg/cmd/root.go b/pkg/cmd/root.go index 7bf9e0e470..0bfec7e668 100644 --- a/pkg/cmd/root.go +++ b/pkg/cmd/root.go @@ -1,37 +1,153 @@ package cmd import ( + "net/http" "os" "path" + "runtime/pprof" "strings" "time" + "github.com/joho/godotenv" "github.com/lestrrat-go/file-rotatelogs" + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/rifflock/lfshook" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/spf13/viper" "github.com/x-cray/logrus-prefixed-formatter" + "github.com/c9s/bbgo/pkg/bbgo" + _ "github.com/go-sql-driver/mysql" ) +var cpuProfileFile *os.File + +var userConfig *bbgo.Config + var RootCmd = &cobra.Command{ Use: "bbgo", - Short: "bbgo trade bot", - Long: "bitcoin trader", + Short: "bbgo is a crypto trading bot", // SilenceUsage is an option to silence usage when an error occurs. SilenceUsage: true, + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + if err := cobraLoadDotenv(cmd, args); err != nil { + return err + } + + if viper.GetBool("debug") { + log.Infof("debug mode is enabled") + log.SetLevel(log.DebugLevel) + } + + if viper.GetBool("metrics") { + http.Handle("/metrics", promhttp.Handler()) + go func() { + port := viper.GetString("metrics-port") + log.Infof("starting metrics server at :%s", port) + err := http.ListenAndServe(":"+port, nil) + if err != nil { + log.WithError(err).Errorf("metrics server error") + } + }() + } + + cpuProfile, err := cmd.Flags().GetString("cpu-profile") + if err != nil { + return err + } + + if cpuProfile != "" { + log.Infof("starting cpu profiler...") + + cpuProfileFile, err = os.Create(cpuProfile) + if err != nil { + log.Fatal("could not create CPU profile: ", err) + } + + if err := pprof.StartCPUProfile(cpuProfileFile); err != nil { + log.Fatal("could not start CPU profile: ", err) + } + } + + return cobraLoadConfig(cmd, args) + }, + PersistentPostRunE: func(cmd *cobra.Command, args []string) error { + pprof.StopCPUProfile() + if cpuProfileFile != nil { + return cpuProfileFile.Close() // error handling omitted for example + } + + return nil + }, RunE: func(cmd *cobra.Command, args []string) error { return nil }, } +func cobraLoadDotenv(cmd *cobra.Command, args []string) error { + disableDotEnv, err := cmd.Flags().GetBool("no-dotenv") + if err != nil { + return err + } + + if !disableDotEnv { + dotenvFile, err := cmd.Flags().GetString("dotenv") + if err != nil { + return err + } + + if _, err := os.Stat(dotenvFile); err == nil { + if err := godotenv.Load(dotenvFile); err != nil { + return errors.Wrap(err, "error loading dotenv file") + } + } + } + return nil +} + +func cobraLoadConfig(cmd *cobra.Command, args []string) error { + configFile, err := cmd.Flags().GetString("config") + if err != nil { + return errors.Wrapf(err, "failed to get the config flag") + } + + // load config file nicely + if len(configFile) > 0 { + // if config file exists, use the config loaded from the config file. 
+ // otherwise, use an empty config object + if _, err := os.Stat(configFile); err == nil { + // load successfully + userConfig, err = bbgo.Load(configFile, false) + if err != nil { + return errors.Wrapf(err, "can not load config file: %s", configFile) + } + + } else if os.IsNotExist(err) { + // config file doesn't exist, we should use the empty config + userConfig = &bbgo.Config{} + } else { + // other error + return errors.Wrapf(err, "config file load error: %s", configFile) + } + } + + return nil +} + func init() { - RootCmd.PersistentFlags().Bool("debug", false, "debug flag") - RootCmd.PersistentFlags().String("config", "config/bbgo.yaml", "config file") + RootCmd.PersistentFlags().Bool("debug", false, "debug mode") + RootCmd.PersistentFlags().Bool("metrics", false, "enable prometheus metrics") + RootCmd.PersistentFlags().String("metrics-port", "9090", "prometheus http server port") + + RootCmd.PersistentFlags().Bool("no-dotenv", false, "disable built-in dotenv") + RootCmd.PersistentFlags().String("dotenv", ".env.local", "the dotenv file you want to load") + + RootCmd.PersistentFlags().String("config", "bbgo.yaml", "config file") // A flag can be 'persistent' meaning that this flag will be available to // the command it's assigned to as well as every command under that command. @@ -41,16 +157,19 @@ func init() { RootCmd.PersistentFlags().String("slack-error-channel", "bbgo-error", "slack error channel") RootCmd.PersistentFlags().String("telegram-bot-token", "", "telegram bot token from bot father") - RootCmd.PersistentFlags().String("telegram-auth-token", "", "telegram auth token") + RootCmd.PersistentFlags().String("telegram-bot-auth-token", "", "telegram auth token") RootCmd.PersistentFlags().String("binance-api-key", "", "binance api key") RootCmd.PersistentFlags().String("binance-api-secret", "", "binance api secret") RootCmd.PersistentFlags().String("max-api-key", "", "max api key") RootCmd.PersistentFlags().String("max-api-secret", "", "max api secret") -} -func Execute() { + RootCmd.PersistentFlags().String("ftx-api-key", "", "ftx api key") + RootCmd.PersistentFlags().String("ftx-api-secret", "", "ftx api secret") + RootCmd.PersistentFlags().String("ftx-subaccount", "", "subaccount name. Specify it if the credential is for subaccount.") + RootCmd.PersistentFlags().String("cpu-profile", "", "cpu profile") + viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) // Enable environment variable binding, the env vars are not overloaded yet. @@ -71,23 +190,16 @@ func Execute() { log.WithError(err).Fatal("failed to load config file") } */ - // Once the flags are defined, we can bind config keys with flags. if err := viper.BindPFlags(RootCmd.PersistentFlags()); err != nil { log.WithError(err).Errorf("failed to bind persistent flags. please check the flag settings.") - } - - if err := viper.BindPFlags(RootCmd.Flags()); err != nil { - log.WithError(err).Errorf("failed to bind local flags. 
please check the flag settings.") + return } log.SetFormatter(&prefixed.TextFormatter{}) +} - logger := log.StandardLogger() - if viper.GetBool("debug") { - logger.SetLevel(log.DebugLevel) - } - +func Execute() { environment := os.Getenv("BBGO_ENV") switch environment { case "production", "prod": @@ -101,6 +213,7 @@ func Execute() { if err != nil { log.Panic(err) } + logger := log.StandardLogger() logger.AddHook( lfshook.NewHook( lfshook.WriterMap{ diff --git a/pkg/cmd/run.go b/pkg/cmd/run.go index e3b2683d63..ca634c0a5a 100644 --- a/pkg/cmd/run.go +++ b/pkg/cmd/run.go @@ -1,43 +1,40 @@ package cmd import ( - "bytes" "context" - "fmt" - "image/png" "io/ioutil" "os" "os/exec" "path/filepath" - "strings" + "runtime/pprof" "syscall" "time" - "github.com/gin-gonic/gin" "github.com/pkg/errors" - "github.com/pquerna/otp" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" flag "github.com/spf13/pflag" - "github.com/spf13/viper" - tb "gopkg.in/tucnak/telebot.v2" "github.com/c9s/bbgo/pkg/bbgo" "github.com/c9s/bbgo/pkg/cmd/cmdutil" - "github.com/c9s/bbgo/pkg/notifier/slacknotifier" - "github.com/c9s/bbgo/pkg/notifier/telegramnotifier" - "github.com/c9s/bbgo/pkg/service" - "github.com/c9s/bbgo/pkg/slack/slacklog" + "github.com/c9s/bbgo/pkg/grpc" + "github.com/c9s/bbgo/pkg/server" ) func init() { RunCmd.Flags().Bool("no-compile", false, "do not compile wrapper binary") - + RunCmd.Flags().Bool("no-sync", false, "do not sync on startup") RunCmd.Flags().String("totp-key-url", "", "time-based one-time password key URL, if defined, it will be used for restoring the otp key") RunCmd.Flags().String("totp-issuer", "", "") RunCmd.Flags().String("totp-account-name", "", "") + RunCmd.Flags().Bool("enable-webserver", false, "enable webserver") + RunCmd.Flags().Bool("enable-web-server", false, "legacy option, this is renamed to --enable-webserver") + RunCmd.Flags().String("webserver-bind", ":8080", "webserver binding") + + RunCmd.Flags().Bool("enable-grpc", false, "enable grpc server") + RunCmd.Flags().String("grpc-bind", ":50051", "grpc server binding") - RunCmd.Flags().String("since", "", "pnl since time") + RunCmd.Flags().Bool("setup", false, "use setup mode") RootCmd.AddCommand(RunCmd) } @@ -50,295 +47,288 @@ var RunCmd = &cobra.Command{ RunE: run, } -func runServer(ctx context.Context, userConfig *bbgo.Config, environ *bbgo.Environment) error { - r := gin.Default() - r.GET("/ping", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) - - r.GET("/sessions", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) - - r.GET("/sessions/:session/trades", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) - - r.GET("/sessions/:session/open-orders", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) - - r.GET("/sessions/:session/closed-orders", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) +func runSetup(baseCtx context.Context, userConfig *bbgo.Config, enableApiServer bool) error { + ctx, cancelTrading := context.WithCancel(baseCtx) + defer cancelTrading() - r.GET("/sessions/:session/loaded-symbols", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) + environ := bbgo.NewEnvironment() - r.GET("/sessions/:session/pnl", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) + trader := bbgo.NewTrader(environ) - r.GET("/sessions/:session/market/:symbol/closed-orders", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) + if enableApiServer { + go func() { + s := &server.Server{ + 
Config: userConfig, + Environ: environ, + Trader: trader, + OpenInBrowser: true, + Setup: &server.Setup{ + Context: ctx, + Cancel: cancelTrading, + Token: "", + }, + } - r.GET("/sessions/:session/market/:symbol/open-orders", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) + if err := s.Run(ctx); err != nil { + log.WithError(err).Errorf("server error") + } + }() + } - r.GET("/sessions/:session/market/:symbol/trades", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) + cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) + cancelTrading() - r.GET("/sessions/:session/market/:symbol/pnl", func(c *gin.Context) { - c.JSON(200, gin.H{"message": "pong"}) - }) + // graceful period = 15 second + shutdownCtx, cancelShutdown := context.WithDeadline(ctx, time.Now().Add(15*time.Second)) - return r.Run() // listen and serve on 0.0.0.0:8080 (for windows "localhost:8080") + log.Infof("shutting down...") + trader.Graceful.Shutdown(shutdownCtx) + cancelShutdown() + return nil } -func runConfig(basectx context.Context, userConfig *bbgo.Config) error { - ctx, cancelTrading := context.WithCancel(basectx) - defer cancelTrading() - - environ := bbgo.NewEnvironment() +func BootstrapBacktestEnvironment(ctx context.Context, environ *bbgo.Environment) error { + return environ.ConfigureDatabase(ctx) +} +func BootstrapEnvironment(ctx context.Context, environ *bbgo.Environment, userConfig *bbgo.Config) error { if err := environ.ConfigureDatabase(ctx); err != nil { return err } - if err := environ.AddExchangesFromConfig(userConfig); err != nil { - return err + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return errors.Wrap(err, "exchange session configure error") } if userConfig.Persistence != nil { if err := environ.ConfigurePersistence(userConfig.Persistence); err != nil { - return err + return errors.Wrap(err, "persistence configure error") } } - notification := bbgo.Notifiability{ - SymbolChannelRouter: bbgo.NewPatternChannelRouter(nil), - SessionChannelRouter: bbgo.NewPatternChannelRouter(nil), - ObjectChannelRouter: bbgo.NewObjectChannelRouter(), + if err := environ.ConfigureNotificationSystem(userConfig); err != nil { + return errors.Wrap(err, "notification configure error") } - // for slack - slackToken := viper.GetString("slack-token") - if len(slackToken) > 0 && userConfig.Notifications != nil { - if conf := userConfig.Notifications.Slack; conf != nil { - if conf.ErrorChannel != "" { - log.Infof("found slack configured, setting up log hook...") - log.AddHook(slacklog.NewLogHook(slackToken, conf.ErrorChannel)) - } + return nil +} - log.Infof("adding slack notifier with default channel: %s", conf.DefaultChannel) - var notifier = slacknotifier.New(slackToken, conf.DefaultChannel) - notification.AddNotifier(notifier) - } +func runConfig(basectx context.Context, cmd *cobra.Command, userConfig *bbgo.Config) error { + noSync, err := cmd.Flags().GetBool("no-sync") + if err != nil { + return err } - // for telegram - telegramBotToken := viper.GetString("telegram-bot-token") - telegramBotAuthToken := viper.GetString("telegram-bot-auth-token") - if len(telegramBotToken) > 0 { - log.Infof("initializing telegram bot...") - - bot, err := tb.NewBot(tb.Settings{ - // You can also set custom API URL. - // If field is empty it equals to "https://api.telegram.org". 
- // URL: "http://195.129.111.17:8012", - Token: telegramBotToken, - Poller: &tb.LongPoller{Timeout: 10 * time.Second}, - }) - - if err != nil { - return err - } - - var persistence bbgo.PersistenceService = bbgo.NewMemoryService() - var sessionStore = persistence.NewStore("bbgo", "telegram") - - tt := strings.Split(bot.Token, ":") - telegramID := tt[0] + enableWebServer, err := cmd.Flags().GetBool("enable-webserver") + if err != nil { + return err + } - if environ.PersistenceServiceFacade != nil { - if environ.PersistenceServiceFacade.Redis != nil { - persistence = environ.PersistenceServiceFacade.Redis - sessionStore = persistence.NewStore("bbgo", "telegram", telegramID) - } - } + webServerBind, err := cmd.Flags().GetString("webserver-bind") + if err != nil { + return err + } - interaction := telegramnotifier.NewInteraction(bot, sessionStore) - - if len(telegramBotAuthToken) > 0 { - log.Infof("telegram bot auth token is set, using fixed token for authorization...") - interaction.SetAuthToken(telegramBotAuthToken) - log.Infof("send the following command to the bbgo bot you created to enable the notification") - log.Infof("") - log.Infof("") - log.Infof(" /auth %s", telegramBotAuthToken) - log.Infof("") - log.Infof("") - } + enableWebServerLegacy, err := cmd.Flags().GetBool("enable-web-server") + if err != nil { + return err + } + if enableWebServerLegacy { + log.Warn("command option --enable-web-server is renamed to --enable-webserver") + enableWebServer = true + } - var session telegramnotifier.Session - if err := sessionStore.Load(&session); err != nil || session.Owner == nil { - log.Warnf("session not found, generating new one-time password key for new session...") + enableGrpc, err := cmd.Flags().GetBool("enable-grpc") + if err != nil { + return err + } - key, err := service.NewDefaultTotpKey() - if err != nil { - return errors.Wrapf(err, "failed to setup totp (time-based one time password) key") - } + grpcBind, err := cmd.Flags().GetString("grpc-bind") + if err != nil { + return err + } - displayOTPKey(key) - - qrcodeImagePath := fmt.Sprintf("otp-%s.png", telegramID) - - err = writeOTPKeyAsQRCodePNG(key, qrcodeImagePath) - log.Infof("To scan your OTP QR code, please run the following command:") - log.Infof("") - log.Infof("") - log.Infof(" open %s", qrcodeImagePath) - log.Infof("") - log.Infof("") - log.Infof("send the auth command with the generated one-time password to the bbgo bot you created to enable the notification") - log.Infof("") - log.Infof("") - log.Infof(" /auth {code}") - log.Infof("") - log.Infof("") - - session = telegramnotifier.NewSession(key) - if err := sessionStore.Save(&session); err != nil { - return errors.Wrap(err, "failed to save session") - } - } + _ = grpcBind + _ = enableGrpc - go interaction.Start(session) + ctx, cancelTrading := context.WithCancel(basectx) + defer cancelTrading() - var notifier = telegramnotifier.New(interaction) - notification.AddNotifier(notifier) + environ := bbgo.NewEnvironment() + if err := BootstrapEnvironment(ctx, environ, userConfig); err != nil { + return err } - environ.Notifiability = notification + if err := environ.Init(ctx); err != nil { + return err + } - if userConfig.Notifications != nil { - if err := environ.ConfigureNotification(userConfig.Notifications); err != nil { + if !noSync { + if err := environ.Sync(ctx, userConfig); err != nil { return err } + + if userConfig.Sync != nil { + environ.BindSync(userConfig.Sync) + } } trader := bbgo.NewTrader(environ) - - if userConfig.RiskControls != nil { - 
trader.SetRiskControls(userConfig.RiskControls) + if err := trader.Configure(userConfig); err != nil { + return err } - for _, entry := range userConfig.ExchangeStrategies { - for _, mount := range entry.Mounts { - log.Infof("attaching strategy %T on %s...", entry.Strategy, mount) - trader.AttachStrategyOn(mount, entry.Strategy) - } + if err := trader.LoadState(); err != nil { + return err } - for _, strategy := range userConfig.CrossExchangeStrategies { - log.Infof("attaching cross exchange strategy %T", strategy) - trader.AttachCrossExchangeStrategy(strategy) + if err := trader.Run(ctx); err != nil { + return err } - for _, report := range userConfig.PnLReporters { - if len(report.AverageCostBySymbols) > 0 { - - log.Infof("setting up average cost pnl reporter on symbols: %v", report.AverageCostBySymbols) - trader.ReportPnL(). - AverageCostBySymbols(report.AverageCostBySymbols...). - Of(report.Of...). - When(report.When...) + if enableWebServer { + go func() { + s := &server.Server{ + Config: userConfig, + Environ: environ, + Trader: trader, + } - } else { - return fmt.Errorf("unsupported PnL reporter: %+v", report) - } + if err := s.Run(ctx, webServerBind); err != nil { + log.WithError(err).Errorf("http server bind error") + } + }() } - if err := trader.Run(ctx); err != nil { - return err + if enableGrpc { + go func() { + s := &grpc.Server{ + Config: userConfig, + Environ: environ, + Trader: trader, + } + if err := s.ListenAndServe(grpcBind); err != nil { + log.WithError(err).Errorf("grpc server bind error") + } + }() } cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) - cancelTrading() - shutdownCtx, cancelShutdown := context.WithDeadline(ctx, time.Now().Add(30*time.Second)) - log.Infof("shutting down...") + shutdownCtx, cancelShutdown := context.WithDeadline(ctx, time.Now().Add(30*time.Second)) trader.Graceful.Shutdown(shutdownCtx) cancelShutdown() + + if err := trader.SaveState(); err != nil { + log.WithError(err).Errorf("can not save strategy states") + } + + for _, session := range environ.Sessions() { + if err := session.MarketDataStream.Close(); err != nil { + log.WithError(err).Errorf("[%s] market data stream close error", session.Name) + } + if err := session.UserDataStream.Close(); err != nil { + log.WithError(err).Errorf("[%s] user data stream close error", session.Name) + } + } + return nil } func run(cmd *cobra.Command, args []string) error { - configFile, err := cmd.Flags().GetString("config") + setup, err := cmd.Flags().GetBool("setup") if err != nil { return err } - if len(configFile) == 0 { - return errors.New("--config option is required") - } - noCompile, err := cmd.Flags().GetBool("no-compile") if err != nil { return err } - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + configFile, err := cmd.Flags().GetString("config") + if err != nil { + return err + } - userConfig, err := bbgo.Load(configFile, false) + cpuProfile, err := cmd.Flags().GetString("cpu-profile") if err != nil { return err } - // for wrapper binary, we can just run the strategies - if bbgo.IsWrapperBinary || (userConfig.Build != nil && len(userConfig.Build.Imports) == 0) || noCompile { - userConfig, err = bbgo.Load(configFile, true) - if err != nil { + if !setup { + // if it's not setup, then the config file option is required. 
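+		// (setup mode can start without a config file; cobraLoadConfig falls back to
+		// an empty bbgo.Config when the file does not exist)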
+ if len(configFile) == 0 { + return errors.New("--config option is required") + } + + if _, err := os.Stat(configFile); err != nil { return err } + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // for wrapper binary, we can just run the strategies + if bbgo.IsWrapperBinary || (userConfig.Build != nil && len(userConfig.Build.Imports) == 0) || noCompile { if bbgo.IsWrapperBinary { log.Infof("running wrapper binary...") } - if err := runConfig(ctx, userConfig); err != nil { - return err + if setup { + return runSetup(ctx, userConfig, true) } - return nil - } else { - var runArgs = []string{"run"} - cmd.Flags().Visit(func(flag *flag.Flag) { - runArgs = append(runArgs, "--"+flag.Name, flag.Value.String()) - }) - runArgs = append(runArgs, args...) - - runCmd, err := buildAndRun(ctx, userConfig, runArgs...) + // default setting is false, here load as true + userConfig, err = bbgo.Load(configFile, true) if err != nil { return err } - if sig := cmdutil.WaitForSignal(ctx, syscall.SIGTERM, syscall.SIGINT); sig != nil { - log.Infof("sending signal to the child process...") - if err := runCmd.Process.Signal(sig); err != nil { - return err + if cpuProfile != "" { + f, err := os.Create(cpuProfile) + if err != nil { + log.Fatal("could not create CPU profile: ", err) } + defer f.Close() // error handling omitted for example - if err := runCmd.Wait(); err != nil { - return err + if err := pprof.StartCPUProfile(f); err != nil { + log.Fatal("could not start CPU profile: ", err) } + + defer pprof.StopCPUProfile() + } + + return runConfig(ctx, cmd, userConfig) + } + + return runWrapperBinary(ctx, cmd, userConfig, args) +} + +func runWrapperBinary(ctx context.Context, cmd *cobra.Command, userConfig *bbgo.Config, args []string) error { + var runArgs = []string{"run"} + cmd.Flags().Visit(func(flag *flag.Flag) { + runArgs = append(runArgs, "--"+flag.Name, flag.Value.String()) + }) + runArgs = append(runArgs, args...) + + runCmd, err := buildAndRun(ctx, userConfig, runArgs...) 
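+	// buildAndRun compiles the strategy wrapper binary from userConfig and starts it;
+	// from here on the parent process only forwards SIGINT/SIGTERM to the child and waits.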
+ if err != nil { + return err + } + + if sig := cmdutil.WaitForSignal(ctx, syscall.SIGTERM, syscall.SIGINT); sig != nil { + log.Infof("sending signal to the child process...") + if err := runCmd.Process.Signal(sig); err != nil { + return err + } + + if err := runCmd.Wait(); err != nil { + return err } } @@ -371,35 +361,3 @@ func buildAndRun(ctx context.Context, userConfig *bbgo.Config, args ...string) ( runCmd.Stderr = os.Stderr return runCmd, runCmd.Start() } - -func writeOTPKeyAsQRCodePNG(key *otp.Key, imagePath string) error { - // Convert TOTP key into a PNG - var buf bytes.Buffer - img, err := key.Image(512, 512) - if err != nil { - return err - } - - if err := png.Encode(&buf, img); err != nil { - return err - } - - if err := ioutil.WriteFile(imagePath, buf.Bytes(), 0644); err != nil { - return err - } - - return nil -} - -func displayOTPKey(key *otp.Key) { - log.Infof("") - log.Infof("====================PLEASE STORE YOUR OTP KEY=======================") - log.Infof("") - log.Infof("Issuer: %s", key.Issuer()) - log.Infof("AccountName: %s", key.AccountName()) - log.Infof("Secret: %s", key.Secret()) - log.Infof("Key URL: %s", key.URL()) - log.Infof("") - log.Infof("====================================================================") - log.Infof("") -} diff --git a/pkg/cmd/sync.go b/pkg/cmd/sync.go index 0520ec8764..a898c3fe4a 100644 --- a/pkg/cmd/sync.go +++ b/pkg/cmd/sync.go @@ -2,27 +2,25 @@ package cmd import ( "context" - "fmt" "os" "time" "github.com/pkg/errors" - log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/c9s/bbgo/pkg/bbgo" ) func init() { - SyncCmd.Flags().String("session", "", "the exchange session name for sync") - SyncCmd.Flags().String("symbol", "BTCUSDT", "trading symbol") + SyncCmd.Flags().StringArray("session", []string{}, "the exchange session name for sync") + SyncCmd.Flags().String("symbol", "", "symbol of market for syncing") SyncCmd.Flags().String("since", "", "sync from time") RootCmd.AddCommand(SyncCmd) } var SyncCmd = &cobra.Command{ - Use: "sync", - Short: "sync trades, orders", + Use: "sync [--session=[exchange_name]] [--symbol=[pair_name]] [[--since=yyyy/mm/dd]]", + Short: "sync trades and orders history", SilenceUsage: true, RunE: func(cmd *cobra.Command, args []string) error { ctx := context.Background() @@ -55,28 +53,11 @@ var SyncCmd = &cobra.Command{ return err } - if err := environ.AddExchangesFromConfig(userConfig); err != nil { + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { return err } - var ( - // default start time - startTime = time.Now().AddDate(0, -3, 0) - ) - - if len(since) > 0 { - loc, err := time.LoadLocation("Asia/Taipei") - if err != nil { - return err - } - - startTime, err = time.ParseInLocation("2006-01-02", since, loc) - if err != nil { - return err - } - } - - sessionName, err := cmd.Flags().GetString("session") + sessionNames, err := cmd.Flags().GetStringArray("session") if err != nil { return err } @@ -86,44 +67,40 @@ var SyncCmd = &cobra.Command{ return err } - if len(sessionName) > 0 { - session, ok := environ.Session(sessionName) - if !ok { - return fmt.Errorf("session %s not found", sessionName) - } + var ( + // default sync start time + defaultSyncStartTime = time.Now().AddDate(-1, 0, 0) + ) + + var syncStartTime = defaultSyncStartTime - return syncSession(ctx, environ, session, symbol, startTime) + if userConfig.Sync != nil && userConfig.Sync.Since != nil { + syncStartTime = userConfig.Sync.Since.Time() } - for _, session := range environ.Sessions() { - if err := 
syncSession(ctx, environ, session, symbol, startTime); err != nil { + if len(since) > 0 { + syncStartTime, err = time.ParseInLocation("2006-01-02", since, time.Local) + if err != nil { return err } } - return nil - }, -} - -func syncSession(ctx context.Context, environ *bbgo.Environment, session *bbgo.ExchangeSession, symbol string, startTime time.Time) error { - log.Infof("starting syncing exchange session %s", session.Name) + environ.SetSyncStartTime(syncStartTime) - if session.IsIsolatedMargin { - log.Infof("session is configured as isolated margin session, using isolated margin symbol %s instead of %s", session.IsolatedMarginSymbol, symbol) - symbol = session.IsolatedMarginSymbol - } - - log.Infof("syncing trades from exchange session %s...", session.Name) - if err := environ.TradeSync.SyncTrades(ctx, session.Exchange, symbol, startTime); err != nil { - return err - } - - log.Infof("syncing orders from exchange session %s...", session.Name) - if err := environ.TradeSync.SyncOrders(ctx, session.Exchange, symbol, startTime); err != nil { - return err - } + if len(symbol) > 0 { + if userConfig.Sync != nil && len(userConfig.Sync.Symbols) > 0 { + userConfig.Sync.Symbols = []bbgo.SyncSymbol{ + {Symbol: symbol}, + } + } + } - log.Infof("exchange session %s synchronization done", session.Name) + if len(sessionNames) > 0 { + if userConfig.Sync != nil && len(userConfig.Sync.Sessions) > 0 { + userConfig.Sync.Sessions = sessionNames + } + } - return nil + return environ.Sync(ctx, userConfig) + }, } diff --git a/pkg/cmd/trades.go b/pkg/cmd/trades.go new file mode 100644 index 0000000000..7b93086050 --- /dev/null +++ b/pkg/cmd/trades.go @@ -0,0 +1,146 @@ +package cmd + +import ( + "context" + "fmt" + "syscall" + "time" + + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/cmd/cmdutil" + "github.com/c9s/bbgo/pkg/types" +) + +// go run ./cmd/bbgo trades --session=ftx --symbol="BTC/USD" +var tradesCmd = &cobra.Command{ + Use: "trades --session=[exchange_name] --symbol=[pair_name]", + Short: "Query trading history", + SilenceUsage: true, + PreRunE: cobraInitRequired([]string{ + "session", + "symbol", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + environ := bbgo.NewEnvironment() + + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + symbol, err := cmd.Flags().GetString("symbol") + if err != nil { + return fmt.Errorf("can't get the symbol from flags: %w", err) + } + if symbol == "" { + return fmt.Errorf("symbol is not found") + } + + limit, err := cmd.Flags().GetInt64("limit") + if err != nil { + return err + } + + now := time.Now() + since := now.Add(-24 * time.Hour) + + tradeHistoryService, ok := session.Exchange.(types.ExchangeTradeHistoryService) + if !ok { + // skip exchanges that does not support trading history services + log.Warnf("exchange %s does not implement ExchangeTradeHistoryService, skip syncing closed orders (tradesCmd)", session.Exchange.Name()) + return nil + } + + trades, err := tradeHistoryService.QueryTrades(ctx, symbol, &types.TradeQueryOptions{ + StartTime: &since, + Limit: limit, + LastTradeID: 0, + }) + if err != nil { + return err + } + + log.Infof("%d trades", len(trades)) + for _, trade := range trades { + 
log.Infof("TRADE %s %s %4s %s @ %s orderID %d %s amount %v , fee %v %s ", + trade.Exchange.String(), + trade.Symbol, + trade.Side, + trade.Quantity.FormatString(4), + trade.Price.FormatString(3), + trade.OrderID, + trade.Time.Time().Format(time.StampMilli), + trade.QuoteQuantity, + trade.Fee, + trade.FeeCurrency) + } + return nil + }, +} + +// go run ./cmd/bbgo tradeupdate --session=ftx +var tradeUpdateCmd = &cobra.Command{ + Use: "tradeupdate --session=[exchange_name]", + Short: "Listen to trade update events", + PreRunE: cobraInitRequired([]string{ + "session", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + environ := bbgo.NewEnvironment() + + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + s := session.Exchange.NewStream() + s.OnTradeUpdate(func(trade types.Trade) { + log.Infof("trade update: %+v", trade) + }) + + log.Infof("connecting...") + if err := s.Connect(ctx); err != nil { + return fmt.Errorf("failed to connect to %s", sessionName) + } + log.Infof("connected") + + cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) + return nil + }, +} + +func init() { + tradesCmd.Flags().String("session", "", "the exchange session name for querying balances") + tradesCmd.Flags().String("symbol", "", "the trading pair, like btcusdt") + tradesCmd.Flags().Int64("limit", 100, "limit") + + tradeUpdateCmd.Flags().String("session", "", "the exchange session name for querying balances") + + RootCmd.AddCommand(tradesCmd) + RootCmd.AddCommand(tradeUpdateCmd) +} diff --git a/pkg/cmd/transfer.go b/pkg/cmd/transfer.go index 93f413e7de..9932db3747 100644 --- a/pkg/cmd/transfer.go +++ b/pkg/cmd/transfer.go @@ -2,18 +2,20 @@ package cmd import ( "context" + "fmt" "sort" "time" "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "github.com/c9s/bbgo/pkg/cmd/cmdutil" + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) func init() { - TransferHistoryCmd.Flags().String("exchange", "", "target exchange") + TransferHistoryCmd.Flags().String("session", "", "target exchange session") TransferHistoryCmd.Flags().String("asset", "", "trading symbol") TransferHistoryCmd.Flags().String("since", "", "since time") RootCmd.AddCommand(TransferHistoryCmd) @@ -45,14 +47,23 @@ var TransferHistoryCmd = &cobra.Command{ SilenceUsage: true, RunE: func(cmd *cobra.Command, args []string) error { ctx := context.Background() - _ = ctx - exchangeNameStr, err := cmd.Flags().GetString("exchange") + configFile, err := cmd.Flags().GetString("config") if err != nil { return err } - exchangeName, err := types.ValidExchangeName(exchangeNameStr) + userConfig, err := bbgo.Load(configFile, false) + if err != nil { + return err + } + + environ := bbgo.NewEnvironment() + if err := BootstrapEnvironment(ctx, environ, userConfig); err != nil { + return err + } + + sessionName, err := cmd.Flags().GetString("session") if err != nil { return err } @@ -62,6 +73,11 @@ var TransferHistoryCmd = &cobra.Command{ return err } + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + // default var now = time.Now() var since = now.AddDate(-1, 0, 0) @@ -84,10 +100,13 @@ var TransferHistoryCmd = &cobra.Command{ } } - exchange, _ := 
cmdutil.NewExchange(exchangeName) - var records timeSlice + exchange, ok := session.Exchange.(types.ExchangeTransferService) + if !ok { + return fmt.Errorf("exchange session %s does not implement transfer service", sessionName) + } + deposits, err := exchange.QueryDepositHistory(ctx, asset, since, until) if err != nil { return err @@ -116,10 +135,10 @@ var TransferHistoryCmd = &cobra.Command{ switch record := record.Record.(type) { case types.Deposit: - logrus.Infof("%s: <--- DEPOSIT %f %s [%s]", record.Time, record.Amount, record.Asset, record.Status) + logrus.Infof("%s: <--- DEPOSIT %v %s [%s]", record.Time, record.Amount, record.Asset, record.Status) case types.Withdraw: - logrus.Infof("%s: ---> WITHDRAW %f %s [%s]", record.ApplyTime, record.Amount, record.Asset, record.Status) + logrus.Infof("%s: ---> WITHDRAW %v %s [%s]", record.ApplyTime, record.Amount, record.Asset, record.Status) default: logrus.Infof("unknown record: %+v", record) @@ -129,15 +148,15 @@ var TransferHistoryCmd = &cobra.Command{ stats := calBaselineStats(asset, deposits, withdraws) for asset, quantity := range stats.TotalDeposit { - logrus.Infof("total %s deposit: %f", asset, quantity) + logrus.Infof("total %s deposit: %v", asset, quantity) } for asset, quantity := range stats.TotalWithdraw { - logrus.Infof("total %s withdraw: %f", asset, quantity) + logrus.Infof("total %s withdraw: %v", asset, quantity) } for asset, quantity := range stats.BaselineBalance { - logrus.Infof("baseline %s balance: %f", asset, quantity) + logrus.Infof("baseline %s balance: %v", asset, quantity) } return nil @@ -146,44 +165,44 @@ var TransferHistoryCmd = &cobra.Command{ type BaselineStats struct { Asset string - TotalDeposit map[string]float64 - TotalWithdraw map[string]float64 - BaselineBalance map[string]float64 + TotalDeposit map[string]fixedpoint.Value + TotalWithdraw map[string]fixedpoint.Value + BaselineBalance map[string]fixedpoint.Value } func calBaselineStats(asset string, deposits []types.Deposit, withdraws []types.Withdraw) (stats BaselineStats) { stats.Asset = asset - stats.TotalDeposit = make(map[string]float64) - stats.TotalWithdraw = make(map[string]float64) - stats.BaselineBalance = make(map[string]float64) + stats.TotalDeposit = make(map[string]fixedpoint.Value) + stats.TotalWithdraw = make(map[string]fixedpoint.Value) + stats.BaselineBalance = make(map[string]fixedpoint.Value) for _, deposit := range deposits { if deposit.Status == types.DepositSuccess { if _, ok := stats.TotalDeposit[deposit.Asset]; !ok { - stats.TotalDeposit[deposit.Asset] = 0.0 + stats.TotalDeposit[deposit.Asset] = fixedpoint.Zero } - stats.TotalDeposit[deposit.Asset] += deposit.Amount + stats.TotalDeposit[deposit.Asset] = stats.TotalDeposit[deposit.Asset].Add(deposit.Amount) } } for _, withdraw := range withdraws { if withdraw.Status == "completed" { if _, ok := stats.TotalWithdraw[withdraw.Asset]; !ok { - stats.TotalWithdraw[withdraw.Asset] = 0.0 + stats.TotalWithdraw[withdraw.Asset] = fixedpoint.Zero } - stats.TotalWithdraw[withdraw.Asset] += withdraw.Amount + stats.TotalWithdraw[withdraw.Asset] = stats.TotalWithdraw[withdraw.Asset].Add(withdraw.Amount) } } for asset, deposit := range stats.TotalDeposit { withdraw, ok := stats.TotalWithdraw[asset] if !ok { - withdraw = 0.0 + withdraw = fixedpoint.Zero } - stats.BaselineBalance[asset] = deposit - withdraw + stats.BaselineBalance[asset] = deposit.Sub(withdraw) } return stats diff --git a/pkg/cmd/userdatastream.go b/pkg/cmd/userdatastream.go new file mode 100644 index 0000000000..b53a1a6312 --- 
/dev/null +++ b/pkg/cmd/userdatastream.go @@ -0,0 +1,78 @@ +package cmd + +import ( + "context" + "fmt" + "syscall" + "time" + + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/cmd/cmdutil" + "github.com/c9s/bbgo/pkg/types" +) + +// go run ./cmd/bbgo userdatastream --session=ftx +var userDataStreamCmd = &cobra.Command{ + Use: "userdatastream", + Short: "Listen to session events (orderUpdate, tradeUpdate, balanceUpdate, balanceSnapshot)", + PreRunE: cobraInitRequired([]string{ + "session", + }), + RunE: func(cmd *cobra.Command, args []string) error { + ctx := context.Background() + + sessionName, err := cmd.Flags().GetString("session") + if err != nil { + return err + } + + environ := bbgo.NewEnvironment() + if err := environ.ConfigureExchangeSessions(userConfig); err != nil { + return err + } + + session, ok := environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + s := session.Exchange.NewStream() + s.OnOrderUpdate(func(order types.Order) { + log.Infof("[orderUpdate] %+v", order) + }) + s.OnTradeUpdate(func(trade types.Trade) { + log.Infof("[tradeUpdate] %+v", trade) + }) + s.OnBalanceUpdate(func(trade types.BalanceMap) { + log.Infof("[balanceUpdate] %+v", trade) + }) + s.OnBalanceSnapshot(func(trade types.BalanceMap) { + log.Infof("[balanceSnapshot] %+v", trade) + }) + + log.Infof("connecting...") + if err := s.Connect(ctx); err != nil { + return fmt.Errorf("failed to connect to %s", sessionName) + } + + log.Infof("connected") + defer func() { + log.Infof("closing connection...") + if err := s.Close(); err != nil { + log.WithError(err).Errorf("connection close error") + } + time.Sleep(1 * time.Second) + }() + + cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) + return nil + }, +} + +func init() { + userDataStreamCmd.Flags().String("session", "", "session name") + RootCmd.AddCommand(userDataStreamCmd) +} diff --git a/pkg/cmd/utils.go b/pkg/cmd/utils.go new file mode 100644 index 0000000000..dda83d2ff9 --- /dev/null +++ b/pkg/cmd/utils.go @@ -0,0 +1,51 @@ +package cmd + +import ( + "fmt" + + "github.com/spf13/viper" + + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/exchange/ftx" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func cobraInitRequired(required []string) func(cmd *cobra.Command, args []string) error { + return func(cmd *cobra.Command, args []string) error { + for _, key := range required { + if err := cmd.MarkFlagRequired(key); err != nil { + log.WithError(err).Errorf("cannot mark --%s option required", key) + } + } + return nil + } +} + +// inQuoteAsset converts all balances in quote asset +func inQuoteAsset(balances types.BalanceMap, market types.Market, price fixedpoint.Value) fixedpoint.Value { + quote := balances[market.QuoteCurrency] + base := balances[market.BaseCurrency] + return base.Total().Mul(price).Add(quote.Total()) +} + +func inBaseAsset(balances types.BalanceMap, market types.Market, price fixedpoint.Value) fixedpoint.Value { + quote := balances[market.QuoteCurrency] + base := balances[market.BaseCurrency] + return quote.Total().Div(price).Add(base.Total()) +} + +func newExchange(session string) (types.Exchange, error) { + switch session { + case "ftx": + return ftx.NewExchange( + viper.GetString("ftx-api-key"), + viper.GetString("ftx-api-secret"), + viper.GetString("ftx-subaccount"), + ), nil + + } + return nil, fmt.Errorf("unsupported session %s", 
session) +} diff --git a/pkg/cmd/version.go b/pkg/cmd/version.go new file mode 100644 index 0000000000..72393dd4f9 --- /dev/null +++ b/pkg/cmd/version.go @@ -0,0 +1,23 @@ +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/c9s/bbgo/pkg/version" +) + +func init() { + // VersionCmd.Flags().String("session", "", "the exchange session name for sync") + RootCmd.AddCommand(VersionCmd) +} + +var VersionCmd = &cobra.Command{ + Use: "version", + Short: "show version name", + SilenceUsage: true, + Run: func(cmd *cobra.Command, args []string) { + fmt.Println(version.Version) + }, +} diff --git a/pkg/data/tsv/writer.go b/pkg/data/tsv/writer.go new file mode 100644 index 0000000000..e83eef72fe --- /dev/null +++ b/pkg/data/tsv/writer.go @@ -0,0 +1,36 @@ +package tsv + +import ( + "encoding/csv" + "io" + "os" +) + +type Writer struct { + file io.WriteCloser + + *csv.Writer +} + +func NewWriterFile(filename string) (*Writer, error) { + f, err := os.Create(filename) + if err != nil { + return nil, err + } + + return NewWriter(f), nil +} + +func NewWriter(file io.WriteCloser) *Writer { + tsv := csv.NewWriter(file) + tsv.Comma = '\t' + return &Writer{ + Writer: tsv, + file: file, + } +} + +func (w *Writer) Close() error { + w.Writer.Flush() + return w.file.Close() +} diff --git a/pkg/datasource/coinmarketcap/datasource.go b/pkg/datasource/coinmarketcap/datasource.go new file mode 100644 index 0000000000..9ea5b8282d --- /dev/null +++ b/pkg/datasource/coinmarketcap/datasource.go @@ -0,0 +1,13 @@ +package coinmarketcap + +import v1 "github.com/c9s/bbgo/pkg/datasource/coinmarketcap/v1" + +type DataSource struct { + client *v1.RestClient +} + +func New(apiKey string) *DataSource { + client := v1.New() + client.Auth(apiKey) + return &DataSource{client: client} +} diff --git a/pkg/datasource/coinmarketcap/v1/client.go b/pkg/datasource/coinmarketcap/v1/client.go new file mode 100644 index 0000000000..be4f3d181e --- /dev/null +++ b/pkg/datasource/coinmarketcap/v1/client.go @@ -0,0 +1,55 @@ +package v1 + +import ( + "context" + "net/http" + "net/url" + "time" + + "github.com/c9s/requestgen" +) + +const baseURL = "https://pro-api.coinmarketcap.com" +const defaultHTTPTimeout = time.Second * 15 + +type RestClient struct { + requestgen.BaseAPIClient + + apiKey string +} + +func New() *RestClient { + u, err := url.Parse(baseURL) + if err != nil { + panic(err) + } + + return &RestClient{ + BaseAPIClient: requestgen.BaseAPIClient{ + BaseURL: u, + HttpClient: &http.Client{ + Timeout: defaultHTTPTimeout, + }, + }, + } +} + +func (c *RestClient) Auth(apiKey string) { + // pragma: allowlist nextline secret + c.apiKey = apiKey +} + +func (c *RestClient) NewAuthenticatedRequest(ctx context.Context, method, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + req, err := c.NewRequest(ctx, method, refURL, params, payload) + if err != nil { + return nil, err + } + + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + // Attach API Key to header. 
https://coinmarketcap.com/api/documentation/v1/#section/Authentication + req.Header.Add("X-CMC_PRO_API_KEY", c.apiKey) + + return req, nil +} diff --git a/pkg/datasource/coinmarketcap/v1/listings.go b/pkg/datasource/coinmarketcap/v1/listings.go new file mode 100644 index 0000000000..35c8532c5e --- /dev/null +++ b/pkg/datasource/coinmarketcap/v1/listings.go @@ -0,0 +1,56 @@ +package v1 + +import ( + "github.com/c9s/requestgen" +) + +//go:generate requestgen -method GET -url "/v1/cryptocurrency/listings/historical" -type ListingsHistoricalRequest -responseType Response -responseDataField Data -responseDataType []Data +type ListingsHistoricalRequest struct { + Client requestgen.AuthenticatedAPIClient + + Date string `param:"date,query,required"` + Start *int `param:"start,query" default:"1"` + Limit *int `param:"limit,query" default:"100"` + Convert *string `param:"convert,query"` + ConvertID *string `param:"convert_id,query"` + Sort *string `param:"sort,query" default:"cmc_rank" validValues:"cmc_rank,name,symbol,market_cap,price,circulating_supply,total_supply,max_supply,num_market_pairs,volume_24h,percent_change_1h,percent_change_24h,percent_change_7d"` + SortDir *string `param:"sort_dir,query" validValues:"asc,desc"` + CryptocurrencyType *string `param:"cryptocurrency_type,query" default:"all" validValues:"all,coins,tokens"` + Aux *string `param:"aux,query" default:"platform,tags,date_added,circulating_supply,total_supply,max_supply,cmc_rank,num_market_pairs"` +} + +//go:generate requestgen -method GET -url "/v1/cryptocurrency/listings/latest" -type ListingsLatestRequest -responseType Response -responseDataField Data -responseDataType []Data +type ListingsLatestRequest struct { + Client requestgen.AuthenticatedAPIClient + + Start *int `param:"start,query" default:"1"` + Limit *int `param:"limit,query" default:"100"` + PriceMin *float64 `param:"price_min,query"` + PriceMax *float64 `param:"price_max,query"` + MarketCapMin *float64 `param:"market_cap_min,query"` + MarketCapMax *float64 `param:"market_cap_max,query"` + Volume24HMin *float64 `param:"volume_24h_min,query"` + Volume24HMax *float64 `param:"volume_24h_max,query"` + CirculatingSupplyMin *float64 `param:"circulating_supply_min,query"` + CirculatingSupplyMax *float64 `param:"circulating_supply_max,query"` + PercentChange24HMin *float64 `param:"percent_change_24h_min,query"` + PercentChange24HMax *float64 `param:"percent_change_24h_max,query"` + Convert *string `param:"convert,query"` + ConvertID *string `param:"convert_id,query"` + Sort *string `param:"sort,query" default:"market_cap" validValues:"name,symbol,date_added,market_cap,market_cap_strict,price,circulating_supply,total_supply,max_supply,num_market_pairs,volume_24h,percent_change_1h,percent_change_24h,percent_change_7d,market_cap_by_total_supply_strict,volume_7d,volume_30d"` + SortDir *string `param:"sort_dir,query" validValues:"asc,desc"` + CryptocurrencyType *string `param:"cryptocurrency_type,query" default:"all" validValues:"all,coins,tokens"` + Tag *string `param:"tag,query" default:"all" validValues:"all,defi,filesharing"` + Aux *string `param:"aux,query" default:"num_market_pairs,cmc_rank,date_added,tags,platform,max_supply,circulating_supply,total_supply"` +} + +//go:generate requestgen -method GET -url "/v1/cryptocurrency/listings/new" -type ListingsNewRequest -responseType Response -responseDataField Data -responseDataType []Data +type ListingsNewRequest struct { + Client requestgen.AuthenticatedAPIClient + + Start *int `param:"start,query" default:"1"` + Limit 
*int `param:"limit,query" default:"100"` + Convert *string `param:"convert,query"` + ConvertID *string `param:"convert_id,query"` + SortDir *string `param:"sort_dir,query" validValues:"asc,desc"` +} diff --git a/pkg/datasource/coinmarketcap/v1/listings_historical_request_requestgen.go b/pkg/datasource/coinmarketcap/v1/listings_historical_request_requestgen.go new file mode 100644 index 0000000000..c71651cdd4 --- /dev/null +++ b/pkg/datasource/coinmarketcap/v1/listings_historical_request_requestgen.go @@ -0,0 +1,315 @@ +// Code generated by "requestgen -method GET -url /v1/cryptocurrency/listings/historical -type ListingsHistoricalRequest -responseType Response -responseDataField Data -responseDataType []Data"; DO NOT EDIT. + +package v1 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (l *ListingsHistoricalRequest) SetDate(Date string) *ListingsHistoricalRequest { + l.Date = Date + return l +} + +func (l *ListingsHistoricalRequest) SetStart(Start int) *ListingsHistoricalRequest { + l.Start = &Start + return l +} + +func (l *ListingsHistoricalRequest) SetLimit(Limit int) *ListingsHistoricalRequest { + l.Limit = &Limit + return l +} + +func (l *ListingsHistoricalRequest) SetConvert(Convert string) *ListingsHistoricalRequest { + l.Convert = &Convert + return l +} + +func (l *ListingsHistoricalRequest) SetConvertID(ConvertID string) *ListingsHistoricalRequest { + l.ConvertID = &ConvertID + return l +} + +func (l *ListingsHistoricalRequest) SetSort(Sort string) *ListingsHistoricalRequest { + l.Sort = &Sort + return l +} + +func (l *ListingsHistoricalRequest) SetSortDir(SortDir string) *ListingsHistoricalRequest { + l.SortDir = &SortDir + return l +} + +func (l *ListingsHistoricalRequest) SetCryptocurrencyType(CryptocurrencyType string) *ListingsHistoricalRequest { + l.CryptocurrencyType = &CryptocurrencyType + return l +} + +func (l *ListingsHistoricalRequest) SetAux(Aux string) *ListingsHistoricalRequest { + l.Aux = &Aux + return l +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (l *ListingsHistoricalRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Date field -> json key date + Date := l.Date + + // TEMPLATE check-required + if len(Date) == 0 { + return nil, fmt.Errorf("date is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Date + params["date"] = Date + // check Start field -> json key start + if l.Start != nil { + Start := *l.Start + + // assign parameter of Start + params["start"] = Start + } else { + Start := 1 + + // assign parameter of Start + params["start"] = Start + } + // check Limit field -> json key limit + if l.Limit != nil { + Limit := *l.Limit + + // assign parameter of Limit + params["limit"] = Limit + } else { + Limit := 100 + + // assign parameter of Limit + params["limit"] = Limit + } + // check Convert field -> json key convert + if l.Convert != nil { + Convert := *l.Convert + + // assign parameter of Convert + params["convert"] = Convert + } else { + } + // check ConvertID field -> json key convert_id + if l.ConvertID != nil { + ConvertID := *l.ConvertID + + // assign parameter of ConvertID + params["convert_id"] = ConvertID + } else { + } + // check Sort field -> json key sort + if l.Sort != nil { + Sort := *l.Sort + + // TEMPLATE check-valid-values + switch Sort { + case "cmc_rank", "name", "symbol", "market_cap", "price", "circulating_supply", "total_supply", "max_supply", 
"num_market_pairs", "volume_24h", "percent_change_1h", "percent_change_24h", "percent_change_7d": + params["sort"] = Sort + + default: + return nil, fmt.Errorf("sort value %v is invalid", Sort) + + } + // END TEMPLATE check-valid-values + + // assign parameter of Sort + params["sort"] = Sort + } else { + Sort := "cmc_rank" + + // assign parameter of Sort + params["sort"] = Sort + } + // check SortDir field -> json key sort_dir + if l.SortDir != nil { + SortDir := *l.SortDir + + // TEMPLATE check-valid-values + switch SortDir { + case "asc", "desc": + params["sort_dir"] = SortDir + + default: + return nil, fmt.Errorf("sort_dir value %v is invalid", SortDir) + + } + // END TEMPLATE check-valid-values + + // assign parameter of SortDir + params["sort_dir"] = SortDir + } else { + } + // check CryptocurrencyType field -> json key cryptocurrency_type + if l.CryptocurrencyType != nil { + CryptocurrencyType := *l.CryptocurrencyType + + // TEMPLATE check-valid-values + switch CryptocurrencyType { + case "all", "coins", "tokens": + params["cryptocurrency_type"] = CryptocurrencyType + + default: + return nil, fmt.Errorf("cryptocurrency_type value %v is invalid", CryptocurrencyType) + + } + // END TEMPLATE check-valid-values + + // assign parameter of CryptocurrencyType + params["cryptocurrency_type"] = CryptocurrencyType + } else { + CryptocurrencyType := "all" + + // assign parameter of CryptocurrencyType + params["cryptocurrency_type"] = CryptocurrencyType + } + // check Aux field -> json key aux + if l.Aux != nil { + Aux := *l.Aux + + // assign parameter of Aux + params["aux"] = Aux + } else { + Aux := "platform,tags,date_added,circulating_supply,total_supply,max_supply,cmc_rank,num_market_pairs" + + // assign parameter of Aux + params["aux"] = Aux + } + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (l *ListingsHistoricalRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (l *ListingsHistoricalRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := l.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if l.isVarSlice(_v) { + l.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (l *ListingsHistoricalRequest) GetParametersJSON() ([]byte, error) { + params, err := l.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (l *ListingsHistoricalRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (l *ListingsHistoricalRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (l *ListingsHistoricalRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := 
reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (l *ListingsHistoricalRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (l *ListingsHistoricalRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := l.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (l *ListingsHistoricalRequest) Do(ctx context.Context) ([]Data, error) { + + // no body params + var params interface{} + query, err := l.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/cryptocurrency/listings/historical" + + req, err := l.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := l.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Data + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/datasource/coinmarketcap/v1/listings_latest_request_requestgen.go b/pkg/datasource/coinmarketcap/v1/listings_latest_request_requestgen.go new file mode 100644 index 0000000000..fe453c8b7f --- /dev/null +++ b/pkg/datasource/coinmarketcap/v1/listings_latest_request_requestgen.go @@ -0,0 +1,457 @@ +// Code generated by "requestgen -method GET -url /v1/cryptocurrency/listings/latest -type ListingsLatestRequest -responseType Response -responseDataField Data -responseDataType []Data"; DO NOT EDIT. 
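// NOTE: a minimal usage sketch for the CoinMarketCap listings requests defined in
// listings.go above; it assumes the RestClient in client.go satisfies
// requestgen.AuthenticatedAPIClient and that the quote map is keyed by the convert
// currency (illustrative only):
//
//	client := v1.New()
//	client.Auth(apiKey)
//
//	req := &v1.ListingsLatestRequest{Client: client}
//	data, err := req.SetLimit(10).SetConvert("USD").Do(ctx)
//	if err != nil {
//		return err
//	}
//	for _, d := range data {
//		fmt.Printf("#%d %s (%s) price=%f\n", d.CmcRank, d.Name, d.Symbol, d.Quote["USD"].Price)
//	}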
+ +package v1 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (l *ListingsLatestRequest) SetStart(Start int) *ListingsLatestRequest { + l.Start = &Start + return l +} + +func (l *ListingsLatestRequest) SetLimit(Limit int) *ListingsLatestRequest { + l.Limit = &Limit + return l +} + +func (l *ListingsLatestRequest) SetPriceMin(PriceMin float64) *ListingsLatestRequest { + l.PriceMin = &PriceMin + return l +} + +func (l *ListingsLatestRequest) SetPriceMax(PriceMax float64) *ListingsLatestRequest { + l.PriceMax = &PriceMax + return l +} + +func (l *ListingsLatestRequest) SetMarketCapMin(MarketCapMin float64) *ListingsLatestRequest { + l.MarketCapMin = &MarketCapMin + return l +} + +func (l *ListingsLatestRequest) SetMarketCapMax(MarketCapMax float64) *ListingsLatestRequest { + l.MarketCapMax = &MarketCapMax + return l +} + +func (l *ListingsLatestRequest) SetVolume24HMin(Volume24HMin float64) *ListingsLatestRequest { + l.Volume24HMin = &Volume24HMin + return l +} + +func (l *ListingsLatestRequest) SetVolume24HMax(Volume24HMax float64) *ListingsLatestRequest { + l.Volume24HMax = &Volume24HMax + return l +} + +func (l *ListingsLatestRequest) SetCirculatingSupplyMin(CirculatingSupplyMin float64) *ListingsLatestRequest { + l.CirculatingSupplyMin = &CirculatingSupplyMin + return l +} + +func (l *ListingsLatestRequest) SetCirculatingSupplyMax(CirculatingSupplyMax float64) *ListingsLatestRequest { + l.CirculatingSupplyMax = &CirculatingSupplyMax + return l +} + +func (l *ListingsLatestRequest) SetPercentChange24HMin(PercentChange24HMin float64) *ListingsLatestRequest { + l.PercentChange24HMin = &PercentChange24HMin + return l +} + +func (l *ListingsLatestRequest) SetPercentChange24HMax(PercentChange24HMax float64) *ListingsLatestRequest { + l.PercentChange24HMax = &PercentChange24HMax + return l +} + +func (l *ListingsLatestRequest) SetConvert(Convert string) *ListingsLatestRequest { + l.Convert = &Convert + return l +} + +func (l *ListingsLatestRequest) SetConvertID(ConvertID string) *ListingsLatestRequest { + l.ConvertID = &ConvertID + return l +} + +func (l *ListingsLatestRequest) SetSort(Sort string) *ListingsLatestRequest { + l.Sort = &Sort + return l +} + +func (l *ListingsLatestRequest) SetSortDir(SortDir string) *ListingsLatestRequest { + l.SortDir = &SortDir + return l +} + +func (l *ListingsLatestRequest) SetCryptocurrencyType(CryptocurrencyType string) *ListingsLatestRequest { + l.CryptocurrencyType = &CryptocurrencyType + return l +} + +func (l *ListingsLatestRequest) SetTag(Tag string) *ListingsLatestRequest { + l.Tag = &Tag + return l +} + +func (l *ListingsLatestRequest) SetAux(Aux string) *ListingsLatestRequest { + l.Aux = &Aux + return l +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (l *ListingsLatestRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Start field -> json key start + if l.Start != nil { + Start := *l.Start + + // assign parameter of Start + params["start"] = Start + } else { + Start := 1 + + // assign parameter of Start + params["start"] = Start + } + // check Limit field -> json key limit + if l.Limit != nil { + Limit := *l.Limit + + // assign parameter of Limit + params["limit"] = Limit + } else { + Limit := 100 + + // assign parameter of Limit + params["limit"] = Limit + } + // check PriceMin field -> json key price_min + if l.PriceMin != nil { + PriceMin := *l.PriceMin + + // assign parameter of PriceMin + 
params["price_min"] = PriceMin + } else { + } + // check PriceMax field -> json key price_max + if l.PriceMax != nil { + PriceMax := *l.PriceMax + + // assign parameter of PriceMax + params["price_max"] = PriceMax + } else { + } + // check MarketCapMin field -> json key market_cap_min + if l.MarketCapMin != nil { + MarketCapMin := *l.MarketCapMin + + // assign parameter of MarketCapMin + params["market_cap_min"] = MarketCapMin + } else { + } + // check MarketCapMax field -> json key market_cap_max + if l.MarketCapMax != nil { + MarketCapMax := *l.MarketCapMax + + // assign parameter of MarketCapMax + params["market_cap_max"] = MarketCapMax + } else { + } + // check Volume24HMin field -> json key volume_24h_min + if l.Volume24HMin != nil { + Volume24HMin := *l.Volume24HMin + + // assign parameter of Volume24HMin + params["volume_24h_min"] = Volume24HMin + } else { + } + // check Volume24HMax field -> json key volume_24h_max + if l.Volume24HMax != nil { + Volume24HMax := *l.Volume24HMax + + // assign parameter of Volume24HMax + params["volume_24h_max"] = Volume24HMax + } else { + } + // check CirculatingSupplyMin field -> json key circulating_supply_min + if l.CirculatingSupplyMin != nil { + CirculatingSupplyMin := *l.CirculatingSupplyMin + + // assign parameter of CirculatingSupplyMin + params["circulating_supply_min"] = CirculatingSupplyMin + } else { + } + // check CirculatingSupplyMax field -> json key circulating_supply_max + if l.CirculatingSupplyMax != nil { + CirculatingSupplyMax := *l.CirculatingSupplyMax + + // assign parameter of CirculatingSupplyMax + params["circulating_supply_max"] = CirculatingSupplyMax + } else { + } + // check PercentChange24HMin field -> json key percent_change_24h_min + if l.PercentChange24HMin != nil { + PercentChange24HMin := *l.PercentChange24HMin + + // assign parameter of PercentChange24HMin + params["percent_change_24h_min"] = PercentChange24HMin + } else { + } + // check PercentChange24HMax field -> json key percent_change_24h_max + if l.PercentChange24HMax != nil { + PercentChange24HMax := *l.PercentChange24HMax + + // assign parameter of PercentChange24HMax + params["percent_change_24h_max"] = PercentChange24HMax + } else { + } + // check Convert field -> json key convert + if l.Convert != nil { + Convert := *l.Convert + + // assign parameter of Convert + params["convert"] = Convert + } else { + } + // check ConvertID field -> json key convert_id + if l.ConvertID != nil { + ConvertID := *l.ConvertID + + // assign parameter of ConvertID + params["convert_id"] = ConvertID + } else { + } + // check Sort field -> json key sort + if l.Sort != nil { + Sort := *l.Sort + + // TEMPLATE check-valid-values + switch Sort { + case "name", "symbol", "date_added", "market_cap", "market_cap_strict", "price", "circulating_supply", "total_supply", "max_supply", "num_market_pairs", "volume_24h", "percent_change_1h", "percent_change_24h", "percent_change_7d", "market_cap_by_total_supply_strict", "volume_7d", "volume_30d": + params["sort"] = Sort + + default: + return nil, fmt.Errorf("sort value %v is invalid", Sort) + + } + // END TEMPLATE check-valid-values + + // assign parameter of Sort + params["sort"] = Sort + } else { + Sort := "market_cap" + + // assign parameter of Sort + params["sort"] = Sort + } + // check SortDir field -> json key sort_dir + if l.SortDir != nil { + SortDir := *l.SortDir + + // TEMPLATE check-valid-values + switch SortDir { + case "asc", "desc": + params["sort_dir"] = SortDir + + default: + return nil, fmt.Errorf("sort_dir value %v is 
invalid", SortDir) + + } + // END TEMPLATE check-valid-values + + // assign parameter of SortDir + params["sort_dir"] = SortDir + } else { + } + // check CryptocurrencyType field -> json key cryptocurrency_type + if l.CryptocurrencyType != nil { + CryptocurrencyType := *l.CryptocurrencyType + + // TEMPLATE check-valid-values + switch CryptocurrencyType { + case "all", "coins", "tokens": + params["cryptocurrency_type"] = CryptocurrencyType + + default: + return nil, fmt.Errorf("cryptocurrency_type value %v is invalid", CryptocurrencyType) + + } + // END TEMPLATE check-valid-values + + // assign parameter of CryptocurrencyType + params["cryptocurrency_type"] = CryptocurrencyType + } else { + CryptocurrencyType := "all" + + // assign parameter of CryptocurrencyType + params["cryptocurrency_type"] = CryptocurrencyType + } + // check Tag field -> json key tag + if l.Tag != nil { + Tag := *l.Tag + + // TEMPLATE check-valid-values + switch Tag { + case "all", "defi", "filesharing": + params["tag"] = Tag + + default: + return nil, fmt.Errorf("tag value %v is invalid", Tag) + + } + // END TEMPLATE check-valid-values + + // assign parameter of Tag + params["tag"] = Tag + } else { + Tag := "all" + + // assign parameter of Tag + params["tag"] = Tag + } + // check Aux field -> json key aux + if l.Aux != nil { + Aux := *l.Aux + + // assign parameter of Aux + params["aux"] = Aux + } else { + Aux := "num_market_pairs,cmc_rank,date_added,tags,platform,max_supply,circulating_supply,total_supply" + + // assign parameter of Aux + params["aux"] = Aux + } + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (l *ListingsLatestRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (l *ListingsLatestRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := l.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if l.isVarSlice(_v) { + l.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (l *ListingsLatestRequest) GetParametersJSON() ([]byte, error) { + params, err := l.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (l *ListingsLatestRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (l *ListingsLatestRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (l *ListingsLatestRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (l *ListingsLatestRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + 
case reflect.Slice: + return true + } + return false +} + +func (l *ListingsLatestRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := l.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (l *ListingsLatestRequest) Do(ctx context.Context) ([]Data, error) { + + // no body params + var params interface{} + query, err := l.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/cryptocurrency/listings/latest" + + req, err := l.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := l.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Data + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/datasource/coinmarketcap/v1/listings_new_request_requestgen.go b/pkg/datasource/coinmarketcap/v1/listings_new_request_requestgen.go new file mode 100644 index 0000000000..a23e47d345 --- /dev/null +++ b/pkg/datasource/coinmarketcap/v1/listings_new_request_requestgen.go @@ -0,0 +1,226 @@ +// Code generated by "requestgen -method GET -url /v1/cryptocurrency/listings/new -type ListingsNewRequest -responseType Response -responseDataField Data -responseDataType []Data"; DO NOT EDIT. + +package v1 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (l *ListingsNewRequest) SetStart(Start int) *ListingsNewRequest { + l.Start = &Start + return l +} + +func (l *ListingsNewRequest) SetLimit(Limit int) *ListingsNewRequest { + l.Limit = &Limit + return l +} + +func (l *ListingsNewRequest) SetConvert(Convert string) *ListingsNewRequest { + l.Convert = &Convert + return l +} + +func (l *ListingsNewRequest) SetConvertID(ConvertID string) *ListingsNewRequest { + l.ConvertID = &ConvertID + return l +} + +func (l *ListingsNewRequest) SetSortDir(SortDir string) *ListingsNewRequest { + l.SortDir = &SortDir + return l +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (l *ListingsNewRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Start field -> json key start + if l.Start != nil { + Start := *l.Start + + // assign parameter of Start + params["start"] = Start + } else { + Start := 1 + + // assign parameter of Start + params["start"] = Start + } + // check Limit field -> json key limit + if l.Limit != nil { + Limit := *l.Limit + + // assign parameter of Limit + params["limit"] = Limit + } else { + Limit := 100 + + // assign parameter of Limit + params["limit"] = Limit + } + // check Convert field -> json key convert + if l.Convert != nil { + Convert := *l.Convert + + // assign parameter of Convert + params["convert"] = Convert + } else { + } + // check ConvertID field -> json key convert_id + if l.ConvertID != nil { + ConvertID := *l.ConvertID + + // assign parameter of ConvertID + params["convert_id"] = ConvertID + } else { + } + // check SortDir field -> json key sort_dir + if l.SortDir != nil { + SortDir := *l.SortDir + + // TEMPLATE check-valid-values + switch SortDir { + case "asc", "desc": + params["sort_dir"] = SortDir + + default: + return nil, fmt.Errorf("sort_dir value %v is invalid", SortDir) + + } + // END TEMPLATE 
check-valid-values + + // assign parameter of SortDir + params["sort_dir"] = SortDir + } else { + } + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (l *ListingsNewRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (l *ListingsNewRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := l.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if l.isVarSlice(_v) { + l.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (l *ListingsNewRequest) GetParametersJSON() ([]byte, error) { + params, err := l.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (l *ListingsNewRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (l *ListingsNewRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (l *ListingsNewRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (l *ListingsNewRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (l *ListingsNewRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := l.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (l *ListingsNewRequest) Do(ctx context.Context) ([]json.RawMessage, error) { + + // no body params + var params interface{} + query, err := l.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/cryptocurrency/listings/new" + + req, err := l.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := l.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []json.RawMessage + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/datasource/coinmarketcap/v1/types.go b/pkg/datasource/coinmarketcap/v1/types.go new file mode 100644 index 0000000000..a505625ea1 --- /dev/null +++ b/pkg/datasource/coinmarketcap/v1/types.go @@ -0,0 +1,61 @@ +package v1 + +import ( + "encoding/json" + "time" +) + +type Response struct { + Data json.RawMessage `json:"data"` + Status Status `json:"status"` +} + +type Data struct { + ID int64 `json:"id"` + 
Name string `json:"name"` + Symbol string `json:"symbol"` + Slug string `json:"slug"` + CmcRank int64 `json:"cmc_rank,omitempty"` + IsActive bool `json:"is_active,omitempty"` + IsFiat int64 `json:"is_fiat,omitempty"` + NumMarketPairs int64 `json:"num_market_pairs"` + CirculatingSupply float64 `json:"circulating_supply"` + TotalSupply float64 `json:"total_supply"` + MaxSupply float64 `json:"max_supply"` + LastUpdated time.Time `json:"last_updated"` + DateAdded time.Time `json:"date_added"` + Tags []string `json:"tags"` + SelfReportedCirculatingSupply float64 `json:"self_reported_circulating_supply,omitempty"` + SelfReportedMarketCap float64 `json:"self_reported_market_cap,omitempty"` + Platform Platform `json:"platform"` + Quote map[string]Quote `json:"quote"` +} + +type Quote struct { + Price float64 `json:"price"` + Volume24H float64 `json:"volume_24h"` + VolumeChange24H float64 `json:"volume_change_24h"` + PercentChange1H float64 `json:"percent_change_1h"` + PercentChange24H float64 `json:"percent_change_24h"` + PercentChange7D float64 `json:"percent_change_7d"` + MarketCap float64 `json:"market_cap"` + MarketCapDominance float64 `json:"market_cap_dominance"` + FullyDilutedMarketCap float64 `json:"fully_diluted_market_cap"` + LastUpdated time.Time `json:"last_updated"` +} + +type Status struct { + Timestamp time.Time `json:"timestamp"` + ErrorCode int `json:"error_code"` + ErrorMessage string `json:"error_message"` + Elapsed int `json:"elapsed"` + CreditCount int `json:"credit_count"` +} + +type Platform struct { + ID int `json:"id"` + Name string `json:"name"` + Symbol string `json:"symbol"` + Slug string `json:"slug"` + TokenAddress string `json:"token_address"` +} diff --git a/pkg/datasource/glassnode/datasource.go b/pkg/datasource/glassnode/datasource.go new file mode 100644 index 0000000000..fa6ff51cc9 --- /dev/null +++ b/pkg/datasource/glassnode/datasource.go @@ -0,0 +1,59 @@ +package glassnode + +import ( + "context" + "time" + + "github.com/c9s/bbgo/pkg/datasource/glassnode/glassnodeapi" +) + +type DataSource struct { + client *glassnodeapi.RestClient +} + +func New(apiKey string) *DataSource { + client := glassnodeapi.NewRestClient() + client.Auth(apiKey) + + return &DataSource{client: client} +} + +// query last futures open interest +// https://docs.glassnode.com/api/derivatives#futures-open-interest +func (d *DataSource) QueryFuturesOpenInterest(ctx context.Context, currency string) (float64, error) { + req := glassnodeapi.DerivativesRequest{ + Client: d.client, + Asset: currency, + // 25 hours ago + Since: time.Now().Add(-25 * time.Hour).Unix(), + Interval: glassnodeapi.Interval24h, + Metric: "futures_open_interest_sum", + } + + resp, err := req.Do(ctx) + if err != nil { + return 0, err + } + + return resp.Last().Value, nil +} + +// query last market cap in usd +// https://docs.glassnode.com/api/market#market-cap +func (d *DataSource) QueryMarketCapInUSD(ctx context.Context, currency string) (float64, error) { + req := glassnodeapi.MarketRequest{ + Client: d.client, + Asset: currency, + // 25 hours ago + Since: time.Now().Add(-25 * time.Hour).Unix(), + Interval: glassnodeapi.Interval24h, + Metric: "marketcap_usd", + } + + resp, err := req.Do(ctx) + if err != nil { + return 0, err + } + + return resp.Last().Value, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/addresses.go b/pkg/datasource/glassnode/glassnodeapi/addresses.go new file mode 100644 index 0000000000..261880fe0d --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/addresses.go @@ -0,0 +1,17 @@ 
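// NOTE: a minimal usage sketch for the glassnode DataSource added above
// (the environment variable name is illustrative only):
//
//	ds := glassnode.New(os.Getenv("GLASSNODE_API_KEY"))
//
//	oi, err := ds.QueryFuturesOpenInterest(ctx, "BTC")
//	if err != nil {
//		return err
//	}
//
//	mcap, err := ds.QueryMarketCapInUSD(ctx, "BTC")
//	if err != nil {
//		return err
//	}
//
//	log.Infof("BTC futures open interest: %f, market cap (USD): %f", oi, mcap)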
+package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type AddressesRequest -url "/v1/metrics/addresses/:metric" -responseType Response +type AddressesRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/addresses_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/addresses_request_requestgen.go new file mode 100644 index 0000000000..5af5f7724f --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/addresses_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type AddressesRequest -url /v1/metrics/addresses/:metric -responseType Response"; DO NOT EDIT. + +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (a *AddressesRequest) SetAsset(Asset string) *AddressesRequest { + a.Asset = Asset + return a +} + +func (a *AddressesRequest) SetSince(Since int64) *AddressesRequest { + a.Since = Since + return a +} + +func (a *AddressesRequest) SetUntil(Until int64) *AddressesRequest { + a.Until = Until + return a +} + +func (a *AddressesRequest) SetInterval(Interval Interval) *AddressesRequest { + a.Interval = Interval + return a +} + +func (a *AddressesRequest) SetFormat(Format Format) *AddressesRequest { + a.Format = Format + return a +} + +func (a *AddressesRequest) SetTimestampFormat(TimestampFormat string) *AddressesRequest { + a.TimestampFormat = TimestampFormat + return a +} + +func (a *AddressesRequest) SetMetric(Metric string) *AddressesRequest { + a.Metric = Metric + return a +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (a *AddressesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := a.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := a.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := a.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := a.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := a.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := a.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (a *AddressesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (a *AddressesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + 
+ params, err := a.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (a *AddressesRequest) GetParametersJSON() ([]byte, error) { + params, err := a.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (a *AddressesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := a.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (a *AddressesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (a *AddressesRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := a.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (a *AddressesRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := a.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/addresses/:metric" + slugs, err := a.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = a.applySlugsToUrl(apiURL, slugs) + + req, err := a.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := a.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/blockchain.go b/pkg/datasource/glassnode/glassnodeapi/blockchain.go new file mode 100644 index 0000000000..a9370f725a --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/blockchain.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type BlockchainRequest -url "/v1/metrics/blockchain/:metric" -responseType Response +type BlockchainRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/blockchain_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/blockchain_request_requestgen.go new file mode 100644 index 0000000000..d9978d3d87 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/blockchain_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type BlockchainRequest -url /v1/metrics/blockchain/:metric -responseType Response"; DO NOT EDIT. 
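Usage sketch (illustrative, not part of the patch): the generated setters above return the request, so they chain, and Do sends the call and decodes the JSON body into a Response. The sketch assumes *RestClient (defined in client.go later in this diff) satisfies requestgen.AuthenticatedAPIClient through its embedded BaseAPIClient; the metric slug "active_count" and the environment variable name are only examples.

package main

import (
	"context"
	"fmt"
	"os"
	"time"

	"github.com/c9s/bbgo/pkg/datasource/glassnode/glassnodeapi"
)

func main() {
	// Build the authenticated client (see client.go below) and attach the API key.
	client := glassnodeapi.NewRestClient()
	client.Auth(os.Getenv("GLASSNODE_API_KEY"))

	// Chain the generated setters, then execute the request.
	req := &glassnodeapi.AddressesRequest{Client: client}
	resp, err := req.
		SetAsset("BTC").
		SetMetric("active_count"). // substituted into /v1/metrics/addresses/:metric
		SetSince(time.Now().AddDate(0, 0, -7).Unix()).
		SetUntil(time.Now().Unix()).
		Do(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println(resp)
}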
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (b *BlockchainRequest) SetAsset(Asset string) *BlockchainRequest { + b.Asset = Asset + return b +} + +func (b *BlockchainRequest) SetSince(Since int64) *BlockchainRequest { + b.Since = Since + return b +} + +func (b *BlockchainRequest) SetUntil(Until int64) *BlockchainRequest { + b.Until = Until + return b +} + +func (b *BlockchainRequest) SetInterval(Interval Interval) *BlockchainRequest { + b.Interval = Interval + return b +} + +func (b *BlockchainRequest) SetFormat(Format Format) *BlockchainRequest { + b.Format = Format + return b +} + +func (b *BlockchainRequest) SetTimestampFormat(TimestampFormat string) *BlockchainRequest { + b.TimestampFormat = TimestampFormat + return b +} + +func (b *BlockchainRequest) SetMetric(Metric string) *BlockchainRequest { + b.Metric = Metric + return b +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (b *BlockchainRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := b.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := b.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := b.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := b.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := b.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := b.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (b *BlockchainRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (b *BlockchainRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := b.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (b *BlockchainRequest) GetParametersJSON() ([]byte, error) { + params, err := b.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (b *BlockchainRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := b.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (b *BlockchainRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + 
"\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (b *BlockchainRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := b.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (b *BlockchainRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := b.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/blockchain/:metric" + slugs, err := b.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = b.applySlugsToUrl(apiURL, slugs) + + req, err := b.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := b.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/client.go b/pkg/datasource/glassnode/glassnodeapi/client.go new file mode 100644 index 0000000000..fcc71d0b13 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/client.go @@ -0,0 +1,55 @@ +package glassnodeapi + +import ( + "context" + "net/http" + "net/url" + "time" + + "github.com/c9s/requestgen" +) + +const defaultHTTPTimeout = time.Second * 15 +const glassnodeBaseURL = "https://api.glassnode.com" + +type RestClient struct { + requestgen.BaseAPIClient + + apiKey string +} + +func NewRestClient() *RestClient { + u, err := url.Parse(glassnodeBaseURL) + if err != nil { + panic(err) + } + + return &RestClient{ + BaseAPIClient: requestgen.BaseAPIClient{ + BaseURL: u, + HttpClient: &http.Client{ + Timeout: defaultHTTPTimeout, + }, + }, + } +} + +func (c *RestClient) Auth(apiKey string) { + // pragma: allowlist nextline secret + c.apiKey = apiKey +} + +func (c *RestClient) NewAuthenticatedRequest(ctx context.Context, method, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + req, err := c.NewRequest(ctx, method, refURL, params, payload) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Accept", "application/json") + + // Attch API Key to header. 
https://docs.glassnode.com/basic-api/api-key#usage + req.Header.Add("X-Api-Key", c.apiKey) + + return req, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/defi.go b/pkg/datasource/glassnode/glassnodeapi/defi.go new file mode 100644 index 0000000000..44534256c6 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/defi.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type DefiRequest -url "/v1/metrics/defi/:metric" -responseType Response +type DefiRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/defi_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/defi_request_requestgen.go new file mode 100644 index 0000000000..df39ba1fb8 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/defi_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type DefiRequest -url /v1/metrics/defi/:metric -responseType Response"; DO NOT EDIT. + +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (d *DefiRequest) SetAsset(Asset string) *DefiRequest { + d.Asset = Asset + return d +} + +func (d *DefiRequest) SetSince(Since int64) *DefiRequest { + d.Since = Since + return d +} + +func (d *DefiRequest) SetUntil(Until int64) *DefiRequest { + d.Until = Until + return d +} + +func (d *DefiRequest) SetInterval(Interval Interval) *DefiRequest { + d.Interval = Interval + return d +} + +func (d *DefiRequest) SetFormat(Format Format) *DefiRequest { + d.Format = Format + return d +} + +func (d *DefiRequest) SetTimestampFormat(TimestampFormat string) *DefiRequest { + d.TimestampFormat = TimestampFormat + return d +} + +func (d *DefiRequest) SetMetric(Metric string) *DefiRequest { + d.Metric = Metric + return d +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (d *DefiRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := d.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := d.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := d.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := d.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := d.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := d.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (d *DefiRequest) GetParameters() (map[string]interface{}, error) { + var params = 
map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (d *DefiRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := d.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (d *DefiRequest) GetParametersJSON() ([]byte, error) { + params, err := d.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (d *DefiRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := d.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (d *DefiRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (d *DefiRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := d.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (d *DefiRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := d.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/defi/:metric" + slugs, err := d.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = d.applySlugsToUrl(apiURL, slugs) + + req, err := d.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := d.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/derivatives.go b/pkg/datasource/glassnode/glassnodeapi/derivatives.go new file mode 100644 index 0000000000..00f0f0e69b --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/derivatives.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type DerivativesRequest -url "/v1/metrics/derivatives/:metric" -responseType Response +type DerivativesRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/derivatives_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/derivatives_request_requestgen.go new file mode 100644 index 0000000000..75c861a5c1 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/derivatives_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type DerivativesRequest -url /v1/metrics/derivatives/:metric -responseType Response"; DO NOT EDIT. 
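A sketch of the client layer on its own (illustrative, not part of the patch): RestClient in client.go attaches the X-Api-Key header inside NewAuthenticatedRequest, and the generated Do methods call it with the query as url.Values and a nil payload. The endpoint path and key below are placeholders, and resolving the path against the base URL is assumed to be handled by the embedded requestgen.BaseAPIClient.

package main

import (
	"context"
	"fmt"
	"net/url"

	"github.com/c9s/bbgo/pkg/datasource/glassnode/glassnodeapi"
)

func main() {
	client := glassnodeapi.NewRestClient()
	client.Auth("YOUR-API-KEY") // placeholder key

	// Same call shape as the generated Do methods: query parameters, nil payload.
	query := url.Values{"a": []string{"BTC"}}
	req, err := client.NewAuthenticatedRequest(context.Background(), "GET", "/v1/metrics/market/price_usd_close", query, nil)
	if err != nil {
		panic(err)
	}

	// Inspect the request that would be sent.
	fmt.Println(req.URL.String())
	fmt.Println(req.Header.Get("X-Api-Key")) // added by NewAuthenticatedRequest in client.go
}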
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (d *DerivativesRequest) SetAsset(Asset string) *DerivativesRequest { + d.Asset = Asset + return d +} + +func (d *DerivativesRequest) SetSince(Since int64) *DerivativesRequest { + d.Since = Since + return d +} + +func (d *DerivativesRequest) SetUntil(Until int64) *DerivativesRequest { + d.Until = Until + return d +} + +func (d *DerivativesRequest) SetInterval(Interval Interval) *DerivativesRequest { + d.Interval = Interval + return d +} + +func (d *DerivativesRequest) SetFormat(Format Format) *DerivativesRequest { + d.Format = Format + return d +} + +func (d *DerivativesRequest) SetTimestampFormat(TimestampFormat string) *DerivativesRequest { + d.TimestampFormat = TimestampFormat + return d +} + +func (d *DerivativesRequest) SetMetric(Metric string) *DerivativesRequest { + d.Metric = Metric + return d +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (d *DerivativesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := d.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := d.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := d.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := d.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := d.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := d.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (d *DerivativesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (d *DerivativesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := d.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (d *DerivativesRequest) GetParametersJSON() ([]byte, error) { + params, err := d.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (d *DerivativesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := d.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (d *DerivativesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := 
regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (d *DerivativesRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := d.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (d *DerivativesRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := d.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/derivatives/:metric" + slugs, err := d.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = d.applySlugsToUrl(apiURL, slugs) + + req, err := d.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := d.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/distribution.go b/pkg/datasource/glassnode/glassnodeapi/distribution.go new file mode 100644 index 0000000000..35b47736d6 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/distribution.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type DistributionRequest -url "/v1/metrics/distribution/:metric" -responseType Response +type DistributionRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/distribution_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/distribution_request_requestgen.go new file mode 100644 index 0000000000..aa45e22cef --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/distribution_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type DistributionRequest -url /v1/metrics/distribution/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (d *DistributionRequest) SetAsset(Asset string) *DistributionRequest { + d.Asset = Asset + return d +} + +func (d *DistributionRequest) SetSince(Since int64) *DistributionRequest { + d.Since = Since + return d +} + +func (d *DistributionRequest) SetUntil(Until int64) *DistributionRequest { + d.Until = Until + return d +} + +func (d *DistributionRequest) SetInterval(Interval Interval) *DistributionRequest { + d.Interval = Interval + return d +} + +func (d *DistributionRequest) SetFormat(Format Format) *DistributionRequest { + d.Format = Format + return d +} + +func (d *DistributionRequest) SetTimestampFormat(TimestampFormat string) *DistributionRequest { + d.TimestampFormat = TimestampFormat + return d +} + +func (d *DistributionRequest) SetMetric(Metric string) *DistributionRequest { + d.Metric = Metric + return d +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (d *DistributionRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := d.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := d.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := d.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := d.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := d.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := d.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (d *DistributionRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (d *DistributionRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := d.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (d *DistributionRequest) GetParametersJSON() ([]byte, error) { + params, err := d.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (d *DistributionRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := d.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (d *DistributionRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + 
needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (d *DistributionRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := d.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (d *DistributionRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := d.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/distribution/:metric" + slugs, err := d.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = d.applySlugsToUrl(apiURL, slugs) + + req, err := d.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := d.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/entities.go b/pkg/datasource/glassnode/glassnodeapi/entities.go new file mode 100644 index 0000000000..5e32dad26a --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/entities.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type EntitiesRequest -url "/v1/metrics/entities/:metric" -responseType Response +type EntitiesRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/entities_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/entities_request_requestgen.go new file mode 100644 index 0000000000..3fd3e39297 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/entities_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type EntitiesRequest -url /v1/metrics/entities/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (e *EntitiesRequest) SetAsset(Asset string) *EntitiesRequest { + e.Asset = Asset + return e +} + +func (e *EntitiesRequest) SetSince(Since int64) *EntitiesRequest { + e.Since = Since + return e +} + +func (e *EntitiesRequest) SetUntil(Until int64) *EntitiesRequest { + e.Until = Until + return e +} + +func (e *EntitiesRequest) SetInterval(Interval Interval) *EntitiesRequest { + e.Interval = Interval + return e +} + +func (e *EntitiesRequest) SetFormat(Format Format) *EntitiesRequest { + e.Format = Format + return e +} + +func (e *EntitiesRequest) SetTimestampFormat(TimestampFormat string) *EntitiesRequest { + e.TimestampFormat = TimestampFormat + return e +} + +func (e *EntitiesRequest) SetMetric(Metric string) *EntitiesRequest { + e.Metric = Metric + return e +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (e *EntitiesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := e.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := e.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := e.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := e.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := e.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := e.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (e *EntitiesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (e *EntitiesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := e.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (e *EntitiesRequest) GetParametersJSON() ([]byte, error) { + params, err := e.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (e *EntitiesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := e.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (e *EntitiesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = 
needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (e *EntitiesRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := e.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (e *EntitiesRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := e.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/entities/:metric" + slugs, err := e.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = e.applySlugsToUrl(apiURL, slugs) + + req, err := e.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := e.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/eth2.go b/pkg/datasource/glassnode/glassnodeapi/eth2.go new file mode 100644 index 0000000000..241ab4d170 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/eth2.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type ETH2Request -url "/v1/metrics/eth2/:metric" -responseType Response +type ETH2Request struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/eth_2_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/eth_2_request_requestgen.go new file mode 100644 index 0000000000..3d9502a330 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/eth_2_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type ETH2Request -url /v1/metrics/eth2/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (e *ETH2Request) SetAsset(Asset string) *ETH2Request { + e.Asset = Asset + return e +} + +func (e *ETH2Request) SetSince(Since int64) *ETH2Request { + e.Since = Since + return e +} + +func (e *ETH2Request) SetUntil(Until int64) *ETH2Request { + e.Until = Until + return e +} + +func (e *ETH2Request) SetInterval(Interval Interval) *ETH2Request { + e.Interval = Interval + return e +} + +func (e *ETH2Request) SetFormat(Format Format) *ETH2Request { + e.Format = Format + return e +} + +func (e *ETH2Request) SetTimestampFormat(TimestampFormat string) *ETH2Request { + e.TimestampFormat = TimestampFormat + return e +} + +func (e *ETH2Request) SetMetric(Metric string) *ETH2Request { + e.Metric = Metric + return e +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (e *ETH2Request) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := e.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := e.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := e.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := e.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := e.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := e.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (e *ETH2Request) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (e *ETH2Request) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := e.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (e *ETH2Request) GetParametersJSON() ([]byte, error) { + params, err := e.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (e *ETH2Request) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := e.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (e *ETH2Request) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (e *ETH2Request) GetSlugsMap() 
(map[string]string, error) { + slugs := map[string]string{} + params, err := e.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (e *ETH2Request) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := e.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/eth2/:metric" + slugs, err := e.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = e.applySlugsToUrl(apiURL, slugs) + + req, err := e.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := e.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/fees.go b/pkg/datasource/glassnode/glassnodeapi/fees.go new file mode 100644 index 0000000000..449b567765 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/fees.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type FeesRequest -url "/v1/metrics/fees/:metric" -responseType Response +type FeesRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/fees_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/fees_request_requestgen.go new file mode 100644 index 0000000000..e5071cc5e4 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/fees_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type FeesRequest -url /v1/metrics/fees/:metric -responseType Response"; DO NOT EDIT. 
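The generated query builders treat the asset as required, so a request built without SetAsset fails before any HTTP call is made. A minimal sketch against the FeesRequest defined above (no client is needed, since only the query parameters are built):

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/datasource/glassnode/glassnodeapi"
)

func main() {
	// No SetAsset call, so the required "a" parameter is empty.
	req := &glassnodeapi.FeesRequest{}

	_, err := req.GetQueryParameters()
	fmt.Println(err) // a is required, empty string given
}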
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (f *FeesRequest) SetAsset(Asset string) *FeesRequest { + f.Asset = Asset + return f +} + +func (f *FeesRequest) SetSince(Since int64) *FeesRequest { + f.Since = Since + return f +} + +func (f *FeesRequest) SetUntil(Until int64) *FeesRequest { + f.Until = Until + return f +} + +func (f *FeesRequest) SetInterval(Interval Interval) *FeesRequest { + f.Interval = Interval + return f +} + +func (f *FeesRequest) SetFormat(Format Format) *FeesRequest { + f.Format = Format + return f +} + +func (f *FeesRequest) SetTimestampFormat(TimestampFormat string) *FeesRequest { + f.TimestampFormat = TimestampFormat + return f +} + +func (f *FeesRequest) SetMetric(Metric string) *FeesRequest { + f.Metric = Metric + return f +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (f *FeesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := f.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := f.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := f.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := f.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := f.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := f.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (f *FeesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (f *FeesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := f.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (f *FeesRequest) GetParametersJSON() ([]byte, error) { + params, err := f.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (f *FeesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := f.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (f *FeesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (f *FeesRequest) GetSlugsMap() 
(map[string]string, error) { + slugs := map[string]string{} + params, err := f.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (f *FeesRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := f.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/fees/:metric" + slugs, err := f.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = f.applySlugsToUrl(apiURL, slugs) + + req, err := f.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := f.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/indicators.go b/pkg/datasource/glassnode/glassnodeapi/indicators.go new file mode 100644 index 0000000000..d8382545e3 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/indicators.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type IndicatorsRequest -url "/v1/metrics/indicators/:metric" -responseType Response +type IndicatorsRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/indicators_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/indicators_request_requestgen.go new file mode 100644 index 0000000000..0df98c8036 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/indicators_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type IndicatorsRequest -url /v1/metrics/indicators/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (i *IndicatorsRequest) SetAsset(Asset string) *IndicatorsRequest { + i.Asset = Asset + return i +} + +func (i *IndicatorsRequest) SetSince(Since int64) *IndicatorsRequest { + i.Since = Since + return i +} + +func (i *IndicatorsRequest) SetUntil(Until int64) *IndicatorsRequest { + i.Until = Until + return i +} + +func (i *IndicatorsRequest) SetInterval(Interval Interval) *IndicatorsRequest { + i.Interval = Interval + return i +} + +func (i *IndicatorsRequest) SetFormat(Format Format) *IndicatorsRequest { + i.Format = Format + return i +} + +func (i *IndicatorsRequest) SetTimestampFormat(TimestampFormat string) *IndicatorsRequest { + i.TimestampFormat = TimestampFormat + return i +} + +func (i *IndicatorsRequest) SetMetric(Metric string) *IndicatorsRequest { + i.Metric = Metric + return i +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (i *IndicatorsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := i.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := i.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := i.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := i.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := i.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := i.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (i *IndicatorsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (i *IndicatorsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := i.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (i *IndicatorsRequest) GetParametersJSON() ([]byte, error) { + params, err := i.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (i *IndicatorsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := i.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (i *IndicatorsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + 
"\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (i *IndicatorsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := i.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (i *IndicatorsRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := i.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/indicators/:metric" + slugs, err := i.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = i.applySlugsToUrl(apiURL, slugs) + + req, err := i.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := i.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/institutions.go b/pkg/datasource/glassnode/glassnodeapi/institutions.go new file mode 100644 index 0000000000..671c054594 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/institutions.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type InstitutionsRequest -url "/v1/metrics/institutions/:metric" -responseType Response +type InstitutionsRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/institutions_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/institutions_request_requestgen.go new file mode 100644 index 0000000000..b5ef3ff2b8 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/institutions_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type InstitutionsRequest -url /v1/metrics/institutions/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (i *InstitutionsRequest) SetAsset(Asset string) *InstitutionsRequest { + i.Asset = Asset + return i +} + +func (i *InstitutionsRequest) SetSince(Since int64) *InstitutionsRequest { + i.Since = Since + return i +} + +func (i *InstitutionsRequest) SetUntil(Until int64) *InstitutionsRequest { + i.Until = Until + return i +} + +func (i *InstitutionsRequest) SetInterval(Interval Interval) *InstitutionsRequest { + i.Interval = Interval + return i +} + +func (i *InstitutionsRequest) SetFormat(Format Format) *InstitutionsRequest { + i.Format = Format + return i +} + +func (i *InstitutionsRequest) SetTimestampFormat(TimestampFormat string) *InstitutionsRequest { + i.TimestampFormat = TimestampFormat + return i +} + +func (i *InstitutionsRequest) SetMetric(Metric string) *InstitutionsRequest { + i.Metric = Metric + return i +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (i *InstitutionsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := i.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := i.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := i.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := i.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := i.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := i.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (i *InstitutionsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (i *InstitutionsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := i.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (i *InstitutionsRequest) GetParametersJSON() ([]byte, error) { + params, err := i.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (i *InstitutionsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := i.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (i *InstitutionsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + 
needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (i *InstitutionsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := i.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (i *InstitutionsRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := i.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/institutions/:metric" + slugs, err := i.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = i.applySlugsToUrl(apiURL, slugs) + + req, err := i.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := i.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/lightning.go b/pkg/datasource/glassnode/glassnodeapi/lightning.go new file mode 100644 index 0000000000..d8040db5eb --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/lightning.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type LightningRequest -url "/v1/metrics/lightning/:metric" -responseType Response +type LightningRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/lightning_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/lightning_request_requestgen.go new file mode 100644 index 0000000000..961866226a --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/lightning_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type LightningRequest -url /v1/metrics/lightning/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (l *LightningRequest) SetAsset(Asset string) *LightningRequest { + l.Asset = Asset + return l +} + +func (l *LightningRequest) SetSince(Since int64) *LightningRequest { + l.Since = Since + return l +} + +func (l *LightningRequest) SetUntil(Until int64) *LightningRequest { + l.Until = Until + return l +} + +func (l *LightningRequest) SetInterval(Interval Interval) *LightningRequest { + l.Interval = Interval + return l +} + +func (l *LightningRequest) SetFormat(Format Format) *LightningRequest { + l.Format = Format + return l +} + +func (l *LightningRequest) SetTimestampFormat(TimestampFormat string) *LightningRequest { + l.TimestampFormat = TimestampFormat + return l +} + +func (l *LightningRequest) SetMetric(Metric string) *LightningRequest { + l.Metric = Metric + return l +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (l *LightningRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := l.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := l.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := l.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := l.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := l.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := l.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (l *LightningRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (l *LightningRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := l.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (l *LightningRequest) GetParametersJSON() ([]byte, error) { + params, err := l.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (l *LightningRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := l.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (l *LightningRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = 
needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (l *LightningRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := l.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (l *LightningRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := l.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/lightning/:metric" + slugs, err := l.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = l.applySlugsToUrl(apiURL, slugs) + + req, err := l.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := l.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/market.go b/pkg/datasource/glassnode/glassnodeapi/market.go new file mode 100644 index 0000000000..aeefd0d383 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/market.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type MarketRequest -url "/v1/metrics/market/:metric" -responseType Response +type MarketRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/market_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/market_request_requestgen.go new file mode 100644 index 0000000000..da35ff3d80 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/market_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type MarketRequest -url /v1/metrics/market/:metric -responseType Response"; DO NOT EDIT. 
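+//
+// Illustrative usage sketch (not part of the generated output; "client" below is
+// an assumed requestgen.AuthenticatedAPIClient, e.g. the Glassnode REST client
+// configured elsewhere in this package tree):
+//
+//	req := &MarketRequest{Client: client}
+//	resp, err := req.SetAsset("BTC").
+//		SetMetric("marketcap_usd").
+//		SetInterval(Interval24h).
+//		Do(ctx)
+//	if err != nil {
+//		// handle the request error
+//	}
+//	_ = resp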
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (m *MarketRequest) SetAsset(Asset string) *MarketRequest { + m.Asset = Asset + return m +} + +func (m *MarketRequest) SetSince(Since int64) *MarketRequest { + m.Since = Since + return m +} + +func (m *MarketRequest) SetUntil(Until int64) *MarketRequest { + m.Until = Until + return m +} + +func (m *MarketRequest) SetInterval(Interval Interval) *MarketRequest { + m.Interval = Interval + return m +} + +func (m *MarketRequest) SetFormat(Format Format) *MarketRequest { + m.Format = Format + return m +} + +func (m *MarketRequest) SetTimestampFormat(TimestampFormat string) *MarketRequest { + m.TimestampFormat = TimestampFormat + return m +} + +func (m *MarketRequest) SetMetric(Metric string) *MarketRequest { + m.Metric = Metric + return m +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (m *MarketRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := m.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := m.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := m.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := m.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := m.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := m.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (m *MarketRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (m *MarketRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := m.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (m *MarketRequest) GetParametersJSON() ([]byte, error) { + params, err := m.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (m *MarketRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := m.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (m *MarketRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func 
(m *MarketRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := m.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (m *MarketRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := m.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/market/:metric" + slugs, err := m.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = m.applySlugsToUrl(apiURL, slugs) + + req, err := m.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := m.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/mempool.go b/pkg/datasource/glassnode/glassnodeapi/mempool.go new file mode 100644 index 0000000000..059c1c67b2 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/mempool.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type MempoolRequest -url "/v1/metrics/mempool/:metric" -responseType Response +type MempoolRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/mempool_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/mempool_request_requestgen.go new file mode 100644 index 0000000000..1b49b93f89 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/mempool_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type MempoolRequest -url /v1/metrics/mempool/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (m *MempoolRequest) SetAsset(Asset string) *MempoolRequest { + m.Asset = Asset + return m +} + +func (m *MempoolRequest) SetSince(Since int64) *MempoolRequest { + m.Since = Since + return m +} + +func (m *MempoolRequest) SetUntil(Until int64) *MempoolRequest { + m.Until = Until + return m +} + +func (m *MempoolRequest) SetInterval(Interval Interval) *MempoolRequest { + m.Interval = Interval + return m +} + +func (m *MempoolRequest) SetFormat(Format Format) *MempoolRequest { + m.Format = Format + return m +} + +func (m *MempoolRequest) SetTimestampFormat(TimestampFormat string) *MempoolRequest { + m.TimestampFormat = TimestampFormat + return m +} + +func (m *MempoolRequest) SetMetric(Metric string) *MempoolRequest { + m.Metric = Metric + return m +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (m *MempoolRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := m.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := m.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := m.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := m.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := m.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := m.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (m *MempoolRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (m *MempoolRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := m.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (m *MempoolRequest) GetParametersJSON() ([]byte, error) { + params, err := m.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (m *MempoolRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := m.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (m *MempoolRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + 
return url +} + +func (m *MempoolRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := m.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (m *MempoolRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := m.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/mempool/:metric" + slugs, err := m.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = m.applySlugsToUrl(apiURL, slugs) + + req, err := m.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := m.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/mining.go b/pkg/datasource/glassnode/glassnodeapi/mining.go new file mode 100644 index 0000000000..44cda95a1d --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/mining.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type MiningRequest -url "/v1/metrics/mining/:metric" -responseType Response +type MiningRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/mining_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/mining_request_requestgen.go new file mode 100644 index 0000000000..b448ba87d7 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/mining_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type MiningRequest -url /v1/metrics/mining/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (m *MiningRequest) SetAsset(Asset string) *MiningRequest { + m.Asset = Asset + return m +} + +func (m *MiningRequest) SetSince(Since int64) *MiningRequest { + m.Since = Since + return m +} + +func (m *MiningRequest) SetUntil(Until int64) *MiningRequest { + m.Until = Until + return m +} + +func (m *MiningRequest) SetInterval(Interval Interval) *MiningRequest { + m.Interval = Interval + return m +} + +func (m *MiningRequest) SetFormat(Format Format) *MiningRequest { + m.Format = Format + return m +} + +func (m *MiningRequest) SetTimestampFormat(TimestampFormat string) *MiningRequest { + m.TimestampFormat = TimestampFormat + return m +} + +func (m *MiningRequest) SetMetric(Metric string) *MiningRequest { + m.Metric = Metric + return m +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (m *MiningRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := m.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := m.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := m.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := m.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := m.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := m.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (m *MiningRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (m *MiningRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := m.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (m *MiningRequest) GetParametersJSON() ([]byte, error) { + params, err := m.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (m *MiningRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := m.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (m *MiningRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func 
(m *MiningRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := m.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (m *MiningRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := m.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/mining/:metric" + slugs, err := m.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = m.applySlugsToUrl(apiURL, slugs) + + req, err := m.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := m.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/protocols.go b/pkg/datasource/glassnode/glassnodeapi/protocols.go new file mode 100644 index 0000000000..3a0e62a0a1 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/protocols.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type ProtocolsRequest -url "/v1/metrics/protocols/:metric" -responseType Response +type ProtocolsRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/protocols_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/protocols_request_requestgen.go new file mode 100644 index 0000000000..2b8fb6b0d5 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/protocols_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type ProtocolsRequest -url /v1/metrics/protocols/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (p *ProtocolsRequest) SetAsset(Asset string) *ProtocolsRequest { + p.Asset = Asset + return p +} + +func (p *ProtocolsRequest) SetSince(Since int64) *ProtocolsRequest { + p.Since = Since + return p +} + +func (p *ProtocolsRequest) SetUntil(Until int64) *ProtocolsRequest { + p.Until = Until + return p +} + +func (p *ProtocolsRequest) SetInterval(Interval Interval) *ProtocolsRequest { + p.Interval = Interval + return p +} + +func (p *ProtocolsRequest) SetFormat(Format Format) *ProtocolsRequest { + p.Format = Format + return p +} + +func (p *ProtocolsRequest) SetTimestampFormat(TimestampFormat string) *ProtocolsRequest { + p.TimestampFormat = TimestampFormat + return p +} + +func (p *ProtocolsRequest) SetMetric(Metric string) *ProtocolsRequest { + p.Metric = Metric + return p +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (p *ProtocolsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := p.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := p.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := p.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := p.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := p.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := p.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (p *ProtocolsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (p *ProtocolsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := p.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (p *ProtocolsRequest) GetParametersJSON() ([]byte, error) { + params, err := p.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (p *ProtocolsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := p.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (p *ProtocolsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = 
needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (p *ProtocolsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := p.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (p *ProtocolsRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := p.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/protocols/:metric" + slugs, err := p.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = p.applySlugsToUrl(apiURL, slugs) + + req, err := p.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := p.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/supply.go b/pkg/datasource/glassnode/glassnodeapi/supply.go new file mode 100644 index 0000000000..542f03b237 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/supply.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type SupplyRequest -url "/v1/metrics/supply/:metric" -responseType Response +type SupplyRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/supply_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/supply_request_requestgen.go new file mode 100644 index 0000000000..2577d7da16 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/supply_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type SupplyRequest -url /v1/metrics/supply/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (s *SupplyRequest) SetAsset(Asset string) *SupplyRequest { + s.Asset = Asset + return s +} + +func (s *SupplyRequest) SetSince(Since int64) *SupplyRequest { + s.Since = Since + return s +} + +func (s *SupplyRequest) SetUntil(Until int64) *SupplyRequest { + s.Until = Until + return s +} + +func (s *SupplyRequest) SetInterval(Interval Interval) *SupplyRequest { + s.Interval = Interval + return s +} + +func (s *SupplyRequest) SetFormat(Format Format) *SupplyRequest { + s.Format = Format + return s +} + +func (s *SupplyRequest) SetTimestampFormat(TimestampFormat string) *SupplyRequest { + s.TimestampFormat = TimestampFormat + return s +} + +func (s *SupplyRequest) SetMetric(Metric string) *SupplyRequest { + s.Metric = Metric + return s +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (s *SupplyRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := s.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := s.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := s.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := s.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := s.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := s.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (s *SupplyRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (s *SupplyRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := s.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (s *SupplyRequest) GetParametersJSON() ([]byte, error) { + params, err := s.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (s *SupplyRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := s.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (s *SupplyRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func 
(s *SupplyRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := s.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (s *SupplyRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := s.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/supply/:metric" + slugs, err := s.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = s.applySlugsToUrl(apiURL, slugs) + + req, err := s.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := s.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/transactions.go b/pkg/datasource/glassnode/glassnodeapi/transactions.go new file mode 100644 index 0000000000..57e22412f5 --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/transactions.go @@ -0,0 +1,17 @@ +package glassnodeapi + +import "github.com/c9s/requestgen" + +//go:generate requestgen -method GET -type TransactionsRequest -url "/v1/metrics/transactions/:metric" -responseType Response +type TransactionsRequest struct { + Client requestgen.AuthenticatedAPIClient + + Asset string `param:"a,required,query"` + Since int64 `param:"s,query"` + Until int64 `param:"u,query"` + Interval Interval `param:"i,query"` + Format Format `param:"f,query"` + TimestampFormat string `param:"timestamp_format,query"` + + Metric string `param:"metric,slug"` +} diff --git a/pkg/datasource/glassnode/glassnodeapi/transactions_request_requestgen.go b/pkg/datasource/glassnode/glassnodeapi/transactions_request_requestgen.go new file mode 100644 index 0000000000..7ebcd983bf --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/transactions_request_requestgen.go @@ -0,0 +1,196 @@ +// Code generated by "requestgen -method GET -type TransactionsRequest -url /v1/metrics/transactions/:metric -responseType Response"; DO NOT EDIT. 
+ +package glassnodeapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (t *TransactionsRequest) SetAsset(Asset string) *TransactionsRequest { + t.Asset = Asset + return t +} + +func (t *TransactionsRequest) SetSince(Since int64) *TransactionsRequest { + t.Since = Since + return t +} + +func (t *TransactionsRequest) SetUntil(Until int64) *TransactionsRequest { + t.Until = Until + return t +} + +func (t *TransactionsRequest) SetInterval(Interval Interval) *TransactionsRequest { + t.Interval = Interval + return t +} + +func (t *TransactionsRequest) SetFormat(Format Format) *TransactionsRequest { + t.Format = Format + return t +} + +func (t *TransactionsRequest) SetTimestampFormat(TimestampFormat string) *TransactionsRequest { + t.TimestampFormat = TimestampFormat + return t +} + +func (t *TransactionsRequest) SetMetric(Metric string) *TransactionsRequest { + t.Metric = Metric + return t +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (t *TransactionsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check Asset field -> json key a + Asset := t.Asset + + // TEMPLATE check-required + if len(Asset) == 0 { + return nil, fmt.Errorf("a is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of Asset + params["a"] = Asset + // check Since field -> json key s + Since := t.Since + + // assign parameter of Since + params["s"] = Since + // check Until field -> json key u + Until := t.Until + + // assign parameter of Until + params["u"] = Until + // check Interval field -> json key i + Interval := t.Interval + + // assign parameter of Interval + params["i"] = Interval + // check Format field -> json key f + Format := t.Format + + // assign parameter of Format + params["f"] = Format + // check TimestampFormat field -> json key timestamp_format + TimestampFormat := t.TimestampFormat + + // assign parameter of TimestampFormat + params["timestamp_format"] = TimestampFormat + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (t *TransactionsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (t *TransactionsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := t.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (t *TransactionsRequest) GetParametersJSON() ([]byte, error) { + params, err := t.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (t *TransactionsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check Metric field -> json key metric + Metric := t.Metric + + // assign parameter of Metric + params["metric"] = Metric + + return params, nil +} + +func (t *TransactionsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + 
needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (t *TransactionsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := t.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (t *TransactionsRequest) Do(ctx context.Context) (Response, error) { + + // no body params + var params interface{} + query, err := t.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/v1/metrics/transactions/:metric" + slugs, err := t.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = t.applySlugsToUrl(apiURL, slugs) + + req, err := t.Client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := t.Client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Response + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/datasource/glassnode/glassnodeapi/types.go b/pkg/datasource/glassnode/glassnodeapi/types.go new file mode 100644 index 0000000000..bdc50f33bb --- /dev/null +++ b/pkg/datasource/glassnode/glassnodeapi/types.go @@ -0,0 +1,121 @@ +package glassnodeapi + +import ( + "encoding/json" + "time" +) + +type Interval string + +const ( + Interval1h Interval = "1h" + Interval24h Interval = "24h" + Interval10m Interval = "10m" + Interval1w Interval = "1w" + Interval1m Interval = "1month" +) + +type Format string + +const ( + FormatJSON Format = "JSON" + FormatCSV Format = "CSV" +) + +type Timestamp time.Time + +func (t Timestamp) Unix() float64 { + return float64(time.Time(t).Unix()) +} + +func (t Timestamp) String() string { + return time.Time(t).String() +} + +func (t *Timestamp) UnmarshalJSON(o []byte) error { + var timestamp int64 + if err := json.Unmarshal(o, ×tamp); err != nil { + return err + } + + *t = Timestamp(time.Unix(timestamp, 0)) + return nil +} + +/* +In Glassnode API, there are two types of response, for example: + + /v1/metrics/market/marketcap_usd + + [ + { + "t": 1614556800, + "v": 927789865185.0476 + }, + ... + ] + +and + + /v1/metrics/market/price_usd_ohlc + + [ + { + "t": 1614556800, + "o": { + "c": 49768.16035012147, + "h": 49773.18922304233, + "l": 45159.50305252744, + "o": 45159.50305252744 + } + }, + ... + ] + +both can be stored into the Response structure. + +Note: use `HasOptions` to verify the type of response. 
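+
+For example, a caller could branch on the response type like this (illustrative
+sketch only; `req` stands for any of the request types in this package):
+
+	resp, err := req.Do(ctx)
+	if err != nil {
+		return err
+	}
+	if resp.HasOptions() {
+		// OHLC-style metric: the values live in the "o" map
+		lastClose := resp.LastOptions()["c"]
+		_ = lastClose
+	} else {
+		// plain metric: a single float per data point
+		lastValue := resp.LastValue()
+		_ = lastValue
+	}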
+*/ +type Response []Data +type Data struct { + Timestamp Timestamp `json:"t"` + Value float64 `json:"v"` + Options map[string]float64 `json:"o"` +} + +func (s Response) IsEmpty() bool { + return len(s) == 0 +} + +func (s Response) First() Data { + if s.IsEmpty() { + return Data{} + } + return s[0] +} +func (s Response) FirstValue() float64 { + return s.First().Value +} + +func (s Response) FirstOptions() map[string]float64 { + return s.First().Options +} + +func (s Response) Last() Data { + if s.IsEmpty() { + return Data{} + } + return s[len(s)-1] +} + +func (s Response) LastValue() float64 { + return s.Last().Value +} + +func (s Response) LastOptions() map[string]float64 { + return s.Last().Options +} + +func (s Response) HasOptions() bool { + return len(s.First().Options) != 0 +} diff --git a/pkg/depth/buffer.go b/pkg/depth/buffer.go new file mode 100644 index 0000000000..c960dfb2f9 --- /dev/null +++ b/pkg/depth/buffer.go @@ -0,0 +1,205 @@ +package depth + +import ( + "fmt" + "sync" + "sync/atomic" + "time" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" +) + +type SnapshotFetcher func() (snapshot types.SliceOrderBook, finalUpdateID int64, err error) + +type Update struct { + FirstUpdateID, FinalUpdateID int64 + + // Object is the update object + Object types.SliceOrderBook +} + +//go:generate callbackgen -type Buffer +type Buffer struct { + buffer []Update + + finalUpdateID int64 + fetcher SnapshotFetcher + snapshot *types.SliceOrderBook + + resetCallbacks []func() + readyCallbacks []func(snapshot types.SliceOrderBook, updates []Update) + pushCallbacks []func(update Update) + + resetC chan struct{} + mu sync.Mutex + once util.Reonce + + // updateTimeout the timeout duration when not receiving update messages + updateTimeout time.Duration + + // bufferingPeriod is used to buffer the update message before we get the full depth + bufferingPeriod atomic.Value +} + +func NewBuffer(fetcher SnapshotFetcher) *Buffer { + return &Buffer{ + fetcher: fetcher, + resetC: make(chan struct{}, 1), + } +} + +func (b *Buffer) SetUpdateTimeout(d time.Duration) { + b.updateTimeout = d +} + +func (b *Buffer) SetBufferingPeriod(d time.Duration) { + b.bufferingPeriod.Store(d) +} + +func (b *Buffer) resetSnapshot() { + b.snapshot = nil + b.finalUpdateID = 0 + b.EmitReset() +} + +func (b *Buffer) emitReset() { + select { + case b.resetC <- struct{}{}: + default: + } +} + +func (b *Buffer) Reset() { + b.mu.Lock() + b.resetSnapshot() + b.emitReset() + b.mu.Unlock() +} + +// AddUpdate adds the update to the buffer or push the update to the subscriber +func (b *Buffer) AddUpdate(o types.SliceOrderBook, firstUpdateID int64, finalArgs ...int64) error { + finalUpdateID := firstUpdateID + if len(finalArgs) > 0 { + finalUpdateID = finalArgs[0] + } + + u := Update{ + FirstUpdateID: firstUpdateID, + FinalUpdateID: finalUpdateID, + Object: o, + } + + select { + case <-b.resetC: + log.Warnf("received depth reset signal, resetting...") + + // if the once goroutine is still running, overwriting this once might cause "unlock of unlocked mutex" panic. 
+ b.once.Reset() + default: + } + + // if the snapshot is set to nil, we need to buffer the message + b.mu.Lock() + if b.snapshot == nil { + b.buffer = append(b.buffer, u) + b.once.Do(func() { + go b.tryFetch() + }) + b.mu.Unlock() + return nil + } + + // if there is a missing update, we should reset the snapshot and re-fetch the snapshot + if u.FirstUpdateID > b.finalUpdateID+1 { + // emitReset will reset the once outside the mutex lock section + b.buffer = []Update{u} + finalUpdateID = b.finalUpdateID + b.resetSnapshot() + b.emitReset() + b.mu.Unlock() + return fmt.Errorf("found missing update between finalUpdateID %d and firstUpdateID %d, diff: %d", + finalUpdateID+1, + u.FirstUpdateID, + u.FirstUpdateID-finalUpdateID) + } + + log.Debugf("depth update id %d -> %d", b.finalUpdateID, u.FinalUpdateID) + b.finalUpdateID = u.FinalUpdateID + b.mu.Unlock() + + b.EmitPush(u) + return nil +} + +func (b *Buffer) fetchAndPush() error { + book, finalUpdateID, err := b.fetcher() + if err != nil { + return err + } + + b.mu.Lock() + log.Debugf("fetched depth snapshot, final update id %d", finalUpdateID) + + if len(b.buffer) > 0 { + // the snapshot is too early + if finalUpdateID < b.buffer[0].FirstUpdateID { + b.resetSnapshot() + b.emitReset() + b.mu.Unlock() + return fmt.Errorf("depth snapshot is too early, final update %d is < the first update id %d", finalUpdateID, b.buffer[0].FirstUpdateID) + } + } + + var pushUpdates []Update + for _, u := range b.buffer { + // skip old events + if u.FirstUpdateID < finalUpdateID+1 { + continue + } + + if u.FirstUpdateID > finalUpdateID+1 { + b.resetSnapshot() + b.emitReset() + b.mu.Unlock() + return fmt.Errorf("there is a missing depth update, the update id %d > final update id %d + 1", u.FirstUpdateID, finalUpdateID) + } + + pushUpdates = append(pushUpdates, u) + + // update the final update id to the correct final update id + finalUpdateID = u.FinalUpdateID + } + + // clean the buffer since we have filtered out the buffer we want + b.buffer = nil + + // set the final update ID so that we will know if there is an update missing + b.finalUpdateID = finalUpdateID + + // set the snapshot + b.snapshot = &book + + b.mu.Unlock() + + // should unlock first then call ready + b.EmitReady(book, pushUpdates) + return nil +} + +func (b *Buffer) tryFetch() { + for { + if period := b.bufferingPeriod.Load(); period != nil { + <-time.After(period.(time.Duration)) + } + + err := b.fetchAndPush() + if err != nil { + log.WithError(err).Errorf("snapshot fetch failed") + continue + } + break + } +} diff --git a/pkg/depth/buffer_callbacks.go b/pkg/depth/buffer_callbacks.go new file mode 100644 index 0000000000..bf6527166a --- /dev/null +++ b/pkg/depth/buffer_callbacks.go @@ -0,0 +1,37 @@ +// Code generated by "callbackgen -type Buffer"; DO NOT EDIT. 
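+//
+// Illustrative wiring sketch (not part of the generated output): a depth Buffer
+// is typically constructed with a snapshot fetcher, the callbacks below are
+// registered, and stream updates are fed in through AddUpdate. The fetcher body
+// and the update IDs here are placeholders, not a real exchange integration:
+//
+//	buf := NewBuffer(func() (types.SliceOrderBook, int64, error) {
+//		// fetch a REST depth snapshot and return it with its final update ID
+//		return types.SliceOrderBook{}, 0, nil
+//	})
+//	buf.SetBufferingPeriod(time.Second)
+//	buf.OnReady(func(snapshot types.SliceOrderBook, updates []Update) {
+//		// apply the snapshot, then replay the buffered updates
+//	})
+//	buf.OnPush(func(u Update) {
+//		// apply an incremental update that arrived after the snapshot
+//	})
+//	// for each depth event from the websocket stream:
+//	_ = buf.AddUpdate(types.SliceOrderBook{}, firstUpdateID, finalUpdateID)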
+ +package depth + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +func (b *Buffer) OnReset(cb func()) { + b.resetCallbacks = append(b.resetCallbacks, cb) +} + +func (b *Buffer) EmitReset() { + for _, cb := range b.resetCallbacks { + cb() + } +} + +func (b *Buffer) OnReady(cb func(snapshot types.SliceOrderBook, updates []Update)) { + b.readyCallbacks = append(b.readyCallbacks, cb) +} + +func (b *Buffer) EmitReady(snapshot types.SliceOrderBook, updates []Update) { + for _, cb := range b.readyCallbacks { + cb(snapshot, updates) + } +} + +func (b *Buffer) OnPush(cb func(update Update)) { + b.pushCallbacks = append(b.pushCallbacks, cb) +} + +func (b *Buffer) EmitPush(update Update) { + for _, cb := range b.pushCallbacks { + cb(update) + } +} diff --git a/pkg/depth/buffer_test.go b/pkg/depth/buffer_test.go new file mode 100644 index 0000000000..949db0f216 --- /dev/null +++ b/pkg/depth/buffer_test.go @@ -0,0 +1,157 @@ +//go:build !race +// +build !race + +package depth + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +var itov = fixedpoint.NewFromInt + +func TestDepthBuffer_ReadyState(t *testing.T) { + buf := NewBuffer(func() (book types.SliceOrderBook, finalID int64, err error) { + return types.SliceOrderBook{ + Bids: types.PriceVolumeSlice{ + {Price: itov(100), Volume: itov(1)}, + }, + Asks: types.PriceVolumeSlice{ + {Price: itov(99), Volume: itov(1)}, + }, + }, 33, nil + }) + buf.SetBufferingPeriod(time.Millisecond * 5) + + readyC := make(chan struct{}) + buf.OnReady(func(snapshot types.SliceOrderBook, updates []Update) { + assert.Greater(t, len(updates), 33) + close(readyC) + }) + + var updateID int64 = 1 + for ; updateID < 100; updateID++ { + buf.AddUpdate( + types.SliceOrderBook{ + Bids: types.PriceVolumeSlice{ + {Price: itov(100), Volume: itov(updateID)}, + }, + Asks: types.PriceVolumeSlice{ + {Price: itov(99), Volume: itov(updateID)}, + }, + }, updateID) + } + + <-readyC +} + +func TestDepthBuffer_CorruptedUpdateAtTheBeginning(t *testing.T) { + // snapshot starts from 30, + // the first ready event should have a snapshot(30) and updates (31~50) + var snapshotFinalID int64 = 0 + buf := NewBuffer(func() (types.SliceOrderBook, int64, error) { + snapshotFinalID += 30 + return types.SliceOrderBook{ + Bids: types.PriceVolumeSlice{ + {Price: itov(100), Volume: itov(1)}, + }, + Asks: types.PriceVolumeSlice{ + {Price: itov(99), Volume: itov(1)}, + }, + }, snapshotFinalID, nil + }) + + resetC := make(chan struct{}, 1) + + buf.OnReset(func() { + resetC <- struct{}{} + }) + + var updateID int64 = 10 + for ; updateID < 100; updateID++ { + if updateID == 50 { + updateID += 5 + } + + buf.AddUpdate(types.SliceOrderBook{ + Bids: types.PriceVolumeSlice{ + {Price: itov(100), Volume: itov(updateID)}, + }, + Asks: types.PriceVolumeSlice{ + {Price: itov(99), Volume: itov(updateID)}, + }, + }, updateID) + } + + <-resetC +} + +func TestDepthBuffer_ConcurrentRun(t *testing.T) { + var snapshotFinalID int64 = 0 + buf := NewBuffer(func() (types.SliceOrderBook, int64, error) { + snapshotFinalID += 30 + time.Sleep(10 * time.Millisecond) + return types.SliceOrderBook{ + Bids: types.PriceVolumeSlice{ + {Price: itov(100), Volume: itov(1)}, + }, + Asks: types.PriceVolumeSlice{ + {Price: itov(99), Volume: itov(1)}, + }, + }, snapshotFinalID, nil + }) + + readyCnt := 0 + resetCnt := 0 + pushCnt := 0 + + buf.OnPush(func(update Update) { + pushCnt++ + }) + buf.OnReady(func(snapshot types.SliceOrderBook, 
updates []Update) { + readyCnt++ + }) + buf.OnReset(func() { + resetCnt++ + }) + + ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) + defer cancel() + + ticker := time.NewTicker(time.Millisecond) + defer ticker.Stop() + + var updateID int64 = 10 + + for { + select { + case <-ctx.Done(): + assert.Greater(t, readyCnt, 1) + assert.Greater(t, resetCnt, 1) + assert.Greater(t, pushCnt, 1) + return + + case <-ticker.C: + updateID++ + if updateID%100 == 0 { + updateID++ + } + + buf.AddUpdate(types.SliceOrderBook{ + Bids: types.PriceVolumeSlice{ + {Price: itov(100), Volume: itov(updateID)}, + }, + Asks: types.PriceVolumeSlice{ + {Price: itov(99), Volume: itov(updateID)}, + }, + }, updateID) + + } + } +} diff --git a/pkg/exchange/batch/batch_test.go b/pkg/exchange/batch/batch_test.go new file mode 100644 index 0000000000..389c4ab2a8 --- /dev/null +++ b/pkg/exchange/batch/batch_test.go @@ -0,0 +1 @@ +package batch diff --git a/pkg/exchange/batch/closedorders.go b/pkg/exchange/batch/closedorders.go new file mode 100644 index 0000000000..3af58e2d5f --- /dev/null +++ b/pkg/exchange/batch/closedorders.go @@ -0,0 +1,37 @@ +package batch + +import ( + "context" + "strconv" + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +type ClosedOrderBatchQuery struct { + types.ExchangeTradeHistoryService +} + +func (q *ClosedOrderBatchQuery) Query(ctx context.Context, symbol string, startTime, endTime time.Time, lastOrderID uint64) (c chan types.Order, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.Order{}, + Q: func(startTime, endTime time.Time) (interface{}, error) { + orders, err := q.ExchangeTradeHistoryService.QueryClosedOrders(ctx, symbol, startTime, endTime, lastOrderID) + return orders, err + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.Order).CreationTime) + }, + ID: func(obj interface{}) string { + order := obj.(types.Order) + if order.OrderID > lastOrderID { + lastOrderID = order.OrderID + } + return strconv.FormatUint(order.OrderID, 10) + }, + } + + c = make(chan types.Order, 100) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/deposit.go b/pkg/exchange/batch/deposit.go new file mode 100644 index 0000000000..fdb471782b --- /dev/null +++ b/pkg/exchange/batch/deposit.go @@ -0,0 +1,36 @@ +package batch + +import ( + "context" + "time" + + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/types" +) + +type DepositBatchQuery struct { + types.ExchangeTransferService +} + +func (e *DepositBatchQuery) Query(ctx context.Context, asset string, startTime, endTime time.Time) (c chan types.Deposit, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.Deposit{}, + Limiter: rate.NewLimiter(rate.Every(5*time.Second), 2), + JumpIfEmpty: time.Hour * 24 * 80, + Q: func(startTime, endTime time.Time) (interface{}, error) { + return e.ExchangeTransferService.QueryDepositHistory(ctx, asset, startTime, endTime) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.Deposit).Time) + }, + ID: func(obj interface{}) string { + deposit := obj.(types.Deposit) + return deposit.TransactionID + }, + } + + c = make(chan types.Deposit, 100) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/kline.go b/pkg/exchange/batch/kline.go new file mode 100644 index 0000000000..a4053fe342 --- /dev/null +++ b/pkg/exchange/batch/kline.go @@ -0,0 +1,37 @@ +package batch + +import ( + "context" + "strconv" + "time" + + 
"github.com/c9s/bbgo/pkg/types" +) + +type KLineBatchQuery struct { + types.Exchange +} + +func (e *KLineBatchQuery) Query(ctx context.Context, symbol string, interval types.Interval, startTime, endTime time.Time) (c chan types.KLine, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.KLine{}, + Limiter: nil, // the rate limiter is handled in the exchange query method + Q: func(startTime, endTime time.Time) (interface{}, error) { + return e.Exchange.QueryKLines(ctx, symbol, interval, types.KLineQueryOptions{ + StartTime: &startTime, + EndTime: &endTime, + }) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.KLine).StartTime) + }, + ID: func(obj interface{}) string { + kline := obj.(types.KLine) + return strconv.FormatInt(kline.StartTime.UnixMilli(), 10) + }, + } + + c = make(chan types.KLine, 3000) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/margin_interest.go b/pkg/exchange/batch/margin_interest.go new file mode 100644 index 0000000000..4e8224bc9e --- /dev/null +++ b/pkg/exchange/batch/margin_interest.go @@ -0,0 +1,36 @@ +package batch + +import ( + "context" + "time" + + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/types" +) + +type MarginInterestBatchQuery struct { + types.MarginHistory +} + +func (e *MarginInterestBatchQuery) Query(ctx context.Context, asset string, startTime, endTime time.Time) (c chan types.MarginInterest, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.MarginInterest{}, + Limiter: rate.NewLimiter(rate.Every(5*time.Second), 2), + JumpIfEmpty: time.Hour * 24 * 30, + Q: func(startTime, endTime time.Time) (interface{}, error) { + return e.QueryInterestHistory(ctx, asset, &startTime, &endTime) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.MarginInterest).Time) + }, + ID: func(obj interface{}) string { + interest := obj.(types.MarginInterest) + return interest.Time.String() + }, + } + + c = make(chan types.MarginInterest, 100) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/margin_liquidation.go b/pkg/exchange/batch/margin_liquidation.go new file mode 100644 index 0000000000..3726d18913 --- /dev/null +++ b/pkg/exchange/batch/margin_liquidation.go @@ -0,0 +1,37 @@ +package batch + +import ( + "context" + "strconv" + "time" + + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/types" +) + +type MarginLiquidationBatchQuery struct { + types.MarginHistory +} + +func (e *MarginLiquidationBatchQuery) Query(ctx context.Context, startTime, endTime time.Time) (c chan types.MarginLiquidation, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.MarginLiquidation{}, + Limiter: rate.NewLimiter(rate.Every(5*time.Second), 2), + JumpIfEmpty: time.Hour * 24 * 30, + Q: func(startTime, endTime time.Time) (interface{}, error) { + return e.QueryLiquidationHistory(ctx, &startTime, &endTime) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.MarginLiquidation).UpdatedTime) + }, + ID: func(obj interface{}) string { + liquidation := obj.(types.MarginLiquidation) + return strconv.FormatUint(liquidation.OrderID, 10) + }, + } + + c = make(chan types.MarginLiquidation, 100) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/margin_loan.go b/pkg/exchange/batch/margin_loan.go new file mode 100644 index 0000000000..a32c7ea15e --- /dev/null +++ b/pkg/exchange/batch/margin_loan.go @@ -0,0 +1,37 @@ 
+package batch + +import ( + "context" + "strconv" + "time" + + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/types" +) + +type MarginLoanBatchQuery struct { + types.MarginHistory +} + +func (e *MarginLoanBatchQuery) Query(ctx context.Context, asset string, startTime, endTime time.Time) (c chan types.MarginLoan, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.MarginLoan{}, + Limiter: rate.NewLimiter(rate.Every(5*time.Second), 2), + JumpIfEmpty: time.Hour * 24 * 30, + Q: func(startTime, endTime time.Time) (interface{}, error) { + return e.QueryLoanHistory(ctx, asset, &startTime, &endTime) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.MarginLoan).Time) + }, + ID: func(obj interface{}) string { + loan := obj.(types.MarginLoan) + return strconv.FormatUint(loan.TransactionID, 10) + }, + } + + c = make(chan types.MarginLoan, 100) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/margin_repay.go b/pkg/exchange/batch/margin_repay.go new file mode 100644 index 0000000000..a30ea12085 --- /dev/null +++ b/pkg/exchange/batch/margin_repay.go @@ -0,0 +1,37 @@ +package batch + +import ( + "context" + "strconv" + "time" + + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/types" +) + +type MarginRepayBatchQuery struct { + types.MarginHistory +} + +func (e *MarginRepayBatchQuery) Query(ctx context.Context, asset string, startTime, endTime time.Time) (c chan types.MarginRepay, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.MarginRepay{}, + Limiter: rate.NewLimiter(rate.Every(5*time.Second), 2), + JumpIfEmpty: time.Hour * 24 * 30, + Q: func(startTime, endTime time.Time) (interface{}, error) { + return e.QueryRepayHistory(ctx, asset, &startTime, &endTime) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.MarginRepay).Time) + }, + ID: func(obj interface{}) string { + loan := obj.(types.MarginRepay) + return strconv.FormatUint(loan.TransactionID, 10) + }, + } + + c = make(chan types.MarginRepay, 100) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/reward.go b/pkg/exchange/batch/reward.go new file mode 100644 index 0000000000..07d39a11dc --- /dev/null +++ b/pkg/exchange/batch/reward.go @@ -0,0 +1,34 @@ +package batch + +import ( + "context" + "time" + + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/types" +) + +type RewardBatchQuery struct { + Service types.ExchangeRewardService +} + +func (q *RewardBatchQuery) Query(ctx context.Context, startTime, endTime time.Time) (c chan types.Reward, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.Reward{}, + Limiter: rate.NewLimiter(rate.Every(5*time.Second), 2), + Q: func(startTime, endTime time.Time) (interface{}, error) { + return q.Service.QueryRewards(ctx, startTime) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.Reward).CreatedAt) + }, + ID: func(obj interface{}) string { + return obj.(types.Reward).UUID + }, + } + + c = make(chan types.Reward, 500) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/time_range_query.go b/pkg/exchange/batch/time_range_query.go new file mode 100644 index 0000000000..03535951ed --- /dev/null +++ b/pkg/exchange/batch/time_range_query.go @@ -0,0 +1,126 @@ +package batch + +import ( + "context" + "reflect" + "sort" + "time" + + "github.com/sirupsen/logrus" + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/util" +) 
+ +var log = logrus.WithField("component", "batch") + +type AsyncTimeRangedBatchQuery struct { + // Type is the object type of the result + Type interface{} + + // Limiter is the rate limiter for each query + Limiter *rate.Limiter + + // Q is the remote query function + Q func(startTime, endTime time.Time) (interface{}, error) + + // T function returns time of an object + T func(obj interface{}) time.Time + + // ID returns the ID of the object + ID func(obj interface{}) string + + // JumpIfEmpty jump the startTime + duration when the result is empty + JumpIfEmpty time.Duration +} + +func (q *AsyncTimeRangedBatchQuery) Query(ctx context.Context, ch interface{}, startTime, endTime time.Time) chan error { + errC := make(chan error, 1) + cRef := reflect.ValueOf(ch) + // cRef := reflect.MakeChan(reflect.TypeOf(q.Type), 100) + + go func() { + defer cRef.Close() + defer close(errC) + + idMap := make(map[string]struct{}, 100) + for startTime.Before(endTime) { + if q.Limiter != nil { + if err := q.Limiter.Wait(ctx); err != nil { + errC <- err + return + } + } + + log.Debugf("batch querying %T: %v <=> %v", q.Type, startTime, endTime) + + queryProfiler := util.StartTimeProfile("remoteQuery") + + sliceInf, err := q.Q(startTime, endTime) + if err != nil { + errC <- err + return + } + + listRef := reflect.ValueOf(sliceInf) + listLen := listRef.Len() + log.Debugf("batch querying %T: %d remote records", q.Type, listLen) + + queryProfiler.StopAndLog(log.Debugf) + + if listLen == 0 { + if q.JumpIfEmpty > 0 { + startTime = startTime.Add(q.JumpIfEmpty) + + log.Debugf("batch querying %T: empty records jump to %s", q.Type, startTime) + continue + } + + log.Debugf("batch querying %T: empty records, query is completed", q.Type) + return + } + + // sort by time + sort.Slice(listRef.Interface(), func(i, j int) bool { + a := listRef.Index(i) + b := listRef.Index(j) + tA := q.T(a.Interface()) + tB := q.T(b.Interface()) + return tA.Before(tB) + }) + + sentAny := false + for i := 0; i < listLen; i++ { + item := listRef.Index(i) + entryTime := q.T(item.Interface()) + + if entryTime.Before(startTime) { + continue + } + if entryTime.After(endTime) { + continue + } + + obj := item.Interface() + id := q.ID(obj) + if _, exists := idMap[id]; exists { + log.Debugf("batch querying %T: ignore duplicated record, id = %s", q.Type, id) + continue + } + + idMap[id] = struct{}{} + + cRef.Send(item) + sentAny = true + startTime = entryTime.Add(time.Millisecond) + } + + if !sentAny { + log.Debugf("batch querying %T: %d/%d records are not sent", q.Type, listLen, listLen) + return + } + } + }() + + return errC +} diff --git a/pkg/exchange/batch/time_range_query_test.go b/pkg/exchange/batch/time_range_query_test.go new file mode 100644 index 0000000000..e3d6634e0b --- /dev/null +++ b/pkg/exchange/batch/time_range_query_test.go @@ -0,0 +1,45 @@ +package batch + +import ( + "context" + "strconv" + "testing" + "time" +) + +func Test_TimeRangedQuery(t *testing.T) { + startTime := time.Date(2021, time.January, 1, 0, 0, 0, 0, time.UTC) + endTime := time.Date(2021, time.January, 2, 0, 0, 0, 0, time.UTC) + q := &AsyncTimeRangedBatchQuery{ + Type: time.Time{}, + T: func(obj interface{}) time.Time { + return obj.(time.Time) + }, + ID: func(obj interface{}) string { + return strconv.FormatInt(obj.(time.Time).UnixMilli(), 10) + }, + Q: func(startTime, endTime time.Time) (interface{}, error) { + var cnt = 0 + var data []time.Time + for startTime.Before(endTime) && cnt < 5 { + d := startTime + data = append(data, d) + cnt++ + startTime = 
startTime.Add(time.Minute) + } + t.Logf("data: %v", data) + return data, nil + }, + } + + ch := make(chan time.Time, 100) + + // consumer + go func() { + for d := range ch { + _ = d + } + }() + errC := q.Query(context.Background(), ch, startTime, endTime) + <-errC +} diff --git a/pkg/exchange/batch/trade.go b/pkg/exchange/batch/trade.go new file mode 100644 index 0000000000..dc5abce48d --- /dev/null +++ b/pkg/exchange/batch/trade.go @@ -0,0 +1,43 @@ +package batch + +import ( + "context" + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +var closedErrChan = make(chan error) + +func init() { + close(closedErrChan) +} + +type TradeBatchQuery struct { + types.ExchangeTradeHistoryService +} + +func (e TradeBatchQuery) Query(ctx context.Context, symbol string, options *types.TradeQueryOptions) (c chan types.Trade, errC chan error) { + startTime := *options.StartTime + endTime := *options.EndTime + query := &AsyncTimeRangedBatchQuery{ + Type: types.Trade{}, + Q: func(startTime, endTime time.Time) (interface{}, error) { + return e.ExchangeTradeHistoryService.QueryTrades(ctx, symbol, options) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.Trade).Time) + }, + ID: func(obj interface{}) string { + trade := obj.(types.Trade) + if trade.ID > options.LastTradeID { + options.LastTradeID = trade.ID + } + return trade.Key().String() + }, + } + + c = make(chan types.Trade, 100) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/withdraw.go b/pkg/exchange/batch/withdraw.go new file mode 100644 index 0000000000..36fc374896 --- /dev/null +++ b/pkg/exchange/batch/withdraw.go @@ -0,0 +1,36 @@ +package batch + +import ( + "context" + "time" + + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/types" +) + +type WithdrawBatchQuery struct { + types.ExchangeTransferService +} + +func (e *WithdrawBatchQuery) Query(ctx context.Context, asset string, startTime, endTime time.Time) (c chan types.Withdraw, errC chan error) { + query := &AsyncTimeRangedBatchQuery{ + Type: types.Withdraw{}, + Limiter: rate.NewLimiter(rate.Every(5*time.Second), 2), + JumpIfEmpty: time.Hour * 24 * 80, + Q: func(startTime, endTime time.Time) (interface{}, error) { + return e.ExchangeTransferService.QueryWithdrawHistory(ctx, asset, startTime, endTime) + }, + T: func(obj interface{}) time.Time { + return time.Time(obj.(types.Withdraw).ApplyTime) + }, + ID: func(obj interface{}) string { + withdraw := obj.(types.Withdraw) + return withdraw.TransactionID + }, + } + + c = make(chan types.Withdraw, 100) + errC = query.Query(ctx, c, startTime, endTime) + return c, errC +} diff --git a/pkg/exchange/batch/withdraw_test.go b/pkg/exchange/batch/withdraw_test.go new file mode 100644 index 0000000000..68e67e9f2f --- /dev/null +++ b/pkg/exchange/batch/withdraw_test.go @@ -0,0 +1,38 @@ +package batch + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/exchange/binance" + "github.com/c9s/bbgo/pkg/testutil" +) + +func TestWithdrawBatchQuery(t *testing.T) { + key, secret, ok := testutil.IntegrationTestConfigured(t, "BINANCE") + if !ok { + t.Skip("binance api is not set") + } + + ex := binance.New(key, secret) + q := WithdrawBatchQuery{ + ExchangeTransferService: ex, + } + + ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute) + defer cancel() + now := time.Now() + startTime := now.AddDate(0, -6, 0) + endTime := now + dataC, errC := q.Query(ctx, "", startTime, endTime) + + for withdraw := range dataC { 
+ t.Logf("%+v", withdraw) + } + + err := <-errC + assert.NoError(t, err) +} diff --git a/pkg/exchange/binance/binanceapi/alias.go b/pkg/exchange/binance/binanceapi/alias.go new file mode 100644 index 0000000000..f17052bdf3 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/alias.go @@ -0,0 +1,34 @@ +package binanceapi + +import ( + "github.com/adshao/go-binance/v2" +) + +type SideType = binance.SideType + +const SideTypeBuy = binance.SideTypeBuy +const SideTypeSell = binance.SideTypeSell + +type OrderType = binance.OrderType + +const ( + OrderTypeLimit OrderType = binance.OrderTypeLimit + OrderTypeMarket OrderType = binance.OrderTypeMarket + OrderTypeLimitMaker OrderType = binance.OrderTypeLimitMaker + OrderTypeStopLoss OrderType = binance.OrderTypeStopLoss + OrderTypeStopLossLimit OrderType = binance.OrderTypeStopLossLimit + OrderTypeTakeProfit OrderType = binance.OrderTypeTakeProfit + OrderTypeTakeProfitLimit OrderType = binance.OrderTypeTakeProfitLimit +) + +type OrderStatusType = binance.OrderStatusType + +const ( + OrderStatusTypeNew OrderStatusType = binance.OrderStatusTypeNew + OrderStatusTypePartiallyFilled OrderStatusType = binance.OrderStatusTypePartiallyFilled + OrderStatusTypeFilled OrderStatusType = binance.OrderStatusTypeFilled + OrderStatusTypeCanceled OrderStatusType = binance.OrderStatusTypeCanceled + OrderStatusTypePendingCancel OrderStatusType = binance.OrderStatusTypePendingCancel + OrderStatusTypeRejected OrderStatusType = binance.OrderStatusTypeRejected + OrderStatusTypeExpired OrderStatusType = binance.OrderStatusTypeExpired +) diff --git a/pkg/exchange/binance/binanceapi/client.go b/pkg/exchange/binance/binanceapi/client.go new file mode 100644 index 0000000000..0fa05db422 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/client.go @@ -0,0 +1,229 @@ +package binanceapi + +import ( + "bytes" + "context" + "crypto/hmac" + "crypto/sha256" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strconv" + "time" + + "github.com/c9s/requestgen" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/types" +) + +const defaultHTTPTimeout = time.Second * 15 +const RestBaseURL = "https://api.binance.com" +const SandboxRestBaseURL = "https://testnet.binance.vision" +const DebugRequestResponse = false + +var DefaultHttpClient = &http.Client{ + Timeout: defaultHTTPTimeout, +} + +type RestClient struct { + requestgen.BaseAPIClient + + Key, Secret string + + recvWindow int + timeOffset int64 +} + +func NewClient(baseURL string) *RestClient { + if len(baseURL) == 0 { + baseURL = RestBaseURL + } + + u, err := url.Parse(baseURL) + if err != nil { + panic(err) + } + + client := &RestClient{ + BaseAPIClient: requestgen.BaseAPIClient{ + BaseURL: u, + HttpClient: DefaultHttpClient, + }, + } + + // client.AccountService = &AccountService{client: client} + return client +} + +func (c *RestClient) Auth(key, secret string) { + c.Key = key + // pragma: allowlist nextline secret + c.Secret = secret +} + +// NewRequest create new API request. Relative url can be provided in refURL. 
+func (c *RestClient) NewRequest(ctx context.Context, method, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + rel, err := url.Parse(refURL) + if err != nil { + return nil, err + } + + if params != nil { + rel.RawQuery = params.Encode() + } + + body, err := castPayload(payload) + if err != nil { + return nil, err + } + + pathURL := c.BaseURL.ResolveReference(rel) + return http.NewRequestWithContext(ctx, method, pathURL.String(), bytes.NewReader(body)) +} + +func (c *RestClient) SetTimeOffsetFromServer(ctx context.Context) error { + req, err := c.NewRequest(ctx, "GET", "/api/v3/time", nil, nil) + if err != nil { + return err + } + + resp, err := c.SendRequest(req) + if err != nil { + return err + } + + var a struct { + ServerTime types.MillisecondTimestamp `json:"serverTime"` + } + + err = resp.DecodeJSON(&a) + if err != nil { + return err + } + + c.timeOffset = currentTimestamp() - a.ServerTime.Time().UnixMilli() + return nil +} + +func (c *RestClient) SendRequest(req *http.Request) (*requestgen.Response, error) { + if DebugRequestResponse { + logrus.Debugf("-> request: %+v", req) + response, err := c.BaseAPIClient.SendRequest(req) + logrus.Debugf("<- response: %s", string(response.Body)) + return response, err + } + + return c.BaseAPIClient.SendRequest(req) +} + +// newAuthenticatedRequest creates new http request for authenticated routes. +func (c *RestClient) NewAuthenticatedRequest(ctx context.Context, method, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + if len(c.Key) == 0 { + return nil, errors.New("empty api key") + } + + if len(c.Secret) == 0 { + return nil, errors.New("empty api secret") + } + + rel, err := url.Parse(refURL) + if err != nil { + return nil, err + } + + if params == nil { + params = url.Values{} + } + + if c.recvWindow > 0 { + params.Set("recvWindow", strconv.Itoa(c.recvWindow)) + } + + params.Set("timestamp", strconv.FormatInt(currentTimestamp()-c.timeOffset, 10)) + rawQuery := params.Encode() + + pathURL := c.BaseURL.ResolveReference(rel) + body, err := castPayload(payload) + if err != nil { + return nil, err + } + + toSign := rawQuery + string(body) + signature := sign(c.Secret, toSign) + + // sv is the extra url parameters that we need to attach to the request + sv := url.Values{} + sv.Set("signature", signature) + if rawQuery == "" { + rawQuery = sv.Encode() + } else { + rawQuery = rawQuery + "&" + sv.Encode() + } + + if rawQuery != "" { + pathURL.RawQuery = rawQuery + } + + req, err := http.NewRequestWithContext(ctx, method, pathURL.String(), bytes.NewReader(body)) + if err != nil { + return nil, err + } + + // if our payload body is not an empty string + if len(body) > 0 { + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + } + + req.Header.Add("Accept", "application/json") + + // Build authentication headers + req.Header.Add("X-MBX-APIKEY", c.Key) + return req, nil +} + +// sign uses sha256 to sign the payload with the given secret +func sign(secret, payload string) string { + var sig = hmac.New(sha256.New, []byte(secret)) + _, err := sig.Write([]byte(payload)) + if err != nil { + return "" + } + + return fmt.Sprintf("%x", sig.Sum(nil)) +} + +func currentTimestamp() int64 { + return FormatTimestamp(time.Now()) +} + +// FormatTimestamp formats a time into Unix timestamp in milliseconds, as requested by Binance. 
+func FormatTimestamp(t time.Time) int64 { + return t.UnixNano() / int64(time.Millisecond) +} + +func castPayload(payload interface{}) ([]byte, error) { + if payload != nil { + switch v := payload.(type) { + case string: + return []byte(v), nil + + case []byte: + return v, nil + + default: + body, err := json.Marshal(v) + return body, err + } + } + + return nil, nil +} + +type APIResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data json.RawMessage `json:"data"` +} diff --git a/pkg/exchange/binance/binanceapi/client_test.go b/pkg/exchange/binance/binanceapi/client_test.go new file mode 100644 index 0000000000..e86bba7832 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/client_test.go @@ -0,0 +1,142 @@ +package binanceapi + +import ( + "context" + "log" + "net/http/httputil" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/testutil" +) + +func getTestClientOrSkip(t *testing.T) *RestClient { + key, secret, ok := testutil.IntegrationTestConfigured(t, "BINANCE") + if !ok { + t.SkipNow() + return nil + } + + client := NewClient("") + client.Auth(key, secret) + return client +} + +func TestClient_GetTradeFeeRequest(t *testing.T) { + client := getTestClientOrSkip(t) + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) + + req := client.NewGetTradeFeeRequest() + tradeFees, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotEmpty(t, tradeFees) + t.Logf("tradeFees: %+v", tradeFees) +} + +func TestClient_GetDepositAddressRequest(t *testing.T) { + client := getTestClientOrSkip(t) + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) + + req := client.NewGetDepositAddressRequest() + req.Coin("BTC") + address, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, address) + assert.NotEmpty(t, address.Url) + assert.NotEmpty(t, address.Address) + t.Logf("deposit address: %+v", address) +} + +func TestClient_GetDepositHistoryRequest(t *testing.T) { + client := getTestClientOrSkip(t) + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) + + req := client.NewGetDepositHistoryRequest() + history, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, history) + assert.NotEmpty(t, history) + t.Logf("deposit history: %+v", history) +} + +func TestClient_NewSpotRebateHistoryRequest(t *testing.T) { + client := getTestClientOrSkip(t) + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) + + req := client.NewGetSpotRebateHistoryRequest() + history, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, history) + assert.NotEmpty(t, history) + t.Logf("spot rebate history: %+v", history) +} + +func TestClient_NewGetMarginInterestRateHistoryRequest(t *testing.T) { + client := getTestClientOrSkip(t) + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) + + req := client.NewGetMarginInterestRateHistoryRequest() + req.Asset("BTC") + history, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, history) + assert.NotEmpty(t, history) + t.Logf("interest rate history: %+v", history) +} + +func TestClient_privateCall(t *testing.T) { + key, secret, ok := testutil.IntegrationTestConfigured(t, "BINANCE") + if !ok { + t.SkipNow() + } + + client := NewClient("") + client.Auth(key, secret) + + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) 
+ + req, err := client.NewAuthenticatedRequest(ctx, "GET", "/sapi/v1/asset/tradeFee", nil, nil) + assert.NoError(t, err) + assert.NotNil(t, req) + + resp, err := client.SendRequest(req) + if assert.NoError(t, err) { + var feeStructs []struct { + Symbol string `json:"symbol"` + MakerCommission string `json:"makerCommission"` + TakerCommission string `json:"takerCommission"` + } + err = resp.DecodeJSON(&feeStructs) + if assert.NoError(t, err) { + assert.NotEmpty(t, feeStructs) + } + } else { + dump, _ := httputil.DumpRequest(req, true) + log.Printf("request: %s", dump) + } +} + +func TestClient_setTimeOffsetFromServer(t *testing.T) { + client := NewClient("") + err := client.SetTimeOffsetFromServer(context.Background()) + assert.NoError(t, err) +} diff --git a/pkg/exchange/binance/binanceapi/get_api_referral_if_new_user_request.go b/pkg/exchange/binance/binanceapi/get_api_referral_if_new_user_request.go new file mode 100644 index 0000000000..07d8857b5e --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_api_referral_if_new_user_request.go @@ -0,0 +1,19 @@ +package binanceapi + +import "github.com/c9s/requestgen" + +type ApiReferralIfNewUserResponse struct { + ApiAgentCode string `json:"apiAgentCode"` + RebateWorking bool `json:"rebateWorking"` + IfNewUser bool `json:"ifNewUser"` + ReferrerId int `json:"referrerId"` +} + +//go:generate requestgen -method GET -url "/sapi/v1/apiReferral/ifNewUser" -type GetApiReferralIfNewUserRequest -responseType .ApiReferralIfNewUserResponse +type GetApiReferralIfNewUserRequest struct { + client requestgen.AuthenticatedAPIClient +} + +func (c *RestClient) NewGetApiReferralIfNewUserRequest() *GetApiReferralIfNewUserRequest { + return &GetApiReferralIfNewUserRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_api_referral_if_new_user_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_api_referral_if_new_user_request_requestgen.go new file mode 100644 index 0000000000..feb42d93d1 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_api_referral_if_new_user_request_requestgen.go @@ -0,0 +1,135 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/apiReferral/ifNewUser -type GetApiReferralIfNewUserRequest -responseType .ApiReferralIfNewUserResponse"; DO NOT EDIT. 
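A minimal end-to-end sketch (not part of this change set) following the flow used in client_test.go above: construct the RestClient, attach credentials, sync the timestamp offset, then execute one of the generated requests. The key and secret strings are placeholders, and whether this particular endpoint answers depends on the account's API permissions.

package main

import (
	"context"
	"log"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
)

func main() {
	client := binanceapi.NewClient("") // an empty base URL falls back to RestBaseURL
	client.Auth("YOUR_API_KEY", "YOUR_API_SECRET")

	ctx := context.Background()

	// signed requests carry a timestamp parameter, so align it with the server first
	if err := client.SetTimeOffsetFromServer(ctx); err != nil {
		log.Fatal(err)
	}

	resp, err := client.NewGetApiReferralIfNewUserRequest().Do(ctx)
	if err != nil {
		log.Fatal(err)
	}

	log.Printf("ifNewUser=%v apiAgentCode=%s", resp.IfNewUser, resp.ApiAgentCode)
}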
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetApiReferralIfNewUserRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetApiReferralIfNewUserRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetApiReferralIfNewUserRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetApiReferralIfNewUserRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetApiReferralIfNewUserRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetApiReferralIfNewUserRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetApiReferralIfNewUserRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetApiReferralIfNewUserRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetApiReferralIfNewUserRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetApiReferralIfNewUserRequest) Do(ctx context.Context) (*ApiReferralIfNewUserResponse, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/sapi/v1/apiReferral/ifNewUser" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse ApiReferralIfNewUserResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_deposit_address_request.go b/pkg/exchange/binance/binanceapi/get_deposit_address_request.go new file mode 100644 index 0000000000..17b0005e31 --- /dev/null +++ 
b/pkg/exchange/binance/binanceapi/get_deposit_address_request.go @@ -0,0 +1,25 @@ +package binanceapi + +import ( + "github.com/c9s/requestgen" +) + +type DepositAddress struct { + Address string `json:"address"` + Coin string `json:"coin"` + Tag string `json:"tag"` + Url string `json:"url"` +} + +//go:generate requestgen -method GET -url "/sapi/v1/capital/deposit/address" -type GetDepositAddressRequest -responseType .DepositAddress +type GetDepositAddressRequest struct { + client requestgen.AuthenticatedAPIClient + + coin string `param:"coin"` + + network *string `param:"network"` +} + +func (c *RestClient) NewGetDepositAddressRequest() *GetDepositAddressRequest { + return &GetDepositAddressRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_deposit_address_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_deposit_address_request_requestgen.go new file mode 100644 index 0000000000..6406dcf711 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_deposit_address_request_requestgen.go @@ -0,0 +1,161 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/capital/deposit/address -type GetDepositAddressRequest -responseType .DepositAddress"; DO NOT EDIT. + +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (g *GetDepositAddressRequest) Coin(coin string) *GetDepositAddressRequest { + g.coin = coin + return g +} + +func (g *GetDepositAddressRequest) Network(network string) *GetDepositAddressRequest { + g.network = &network + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetDepositAddressRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetDepositAddressRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check coin field -> json key coin + coin := g.coin + + // assign parameter of coin + params["coin"] = coin + // check network field -> json key network + if g.network != nil { + network := *g.network + + // assign parameter of network + params["network"] = network + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetDepositAddressRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetDepositAddressRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetDepositAddressRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetDepositAddressRequest) applySlugsToUrl(url string, slugs map[string]string) string 
{ + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetDepositAddressRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetDepositAddressRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetDepositAddressRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetDepositAddressRequest) Do(ctx context.Context) (*DepositAddress, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/capital/deposit/address" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse DepositAddress + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_deposit_history_request.go b/pkg/exchange/binance/binanceapi/get_deposit_history_request.go new file mode 100644 index 0000000000..e000de98ce --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_deposit_history_request.go @@ -0,0 +1,38 @@ +package binanceapi + +import ( + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type DepositHistory struct { + Amount fixedpoint.Value `json:"amount"` + Coin string `json:"coin"` + Network string `json:"network"` + Status int `json:"status"` + Address string `json:"address"` + AddressTag string `json:"addressTag"` + TxId string `json:"txId"` + InsertTime types.MillisecondTimestamp `json:"insertTime"` + TransferType int `json:"transferType"` + UnlockConfirm int `json:"unlockConfirm"` + ConfirmTimes string `json:"confirmTimes"` +} + +//go:generate requestgen -method GET -url "/sapi/v1/capital/deposit/hisrec" -type GetDepositHistoryRequest -responseType []DepositHistory +type GetDepositHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + coin *string `param:"coin"` + + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` +} + +func (c *RestClient) NewGetDepositHistoryRequest() *GetDepositHistoryRequest { + return &GetDepositHistoryRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_deposit_history_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_deposit_history_request_requestgen.go new file mode 100644 index 0000000000..dce6cb8b7b --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_deposit_history_request_requestgen.go @@ -0,0 +1,181 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/capital/deposit/hisrec -type GetDepositHistoryRequest -responseType []DepositHistory"; DO NOT EDIT. 
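All three fields on GetDepositHistoryRequest are optional, so the generated setters below store pointers and serialize the time parameters as millisecond timestamps. A short sketch (not part of this change set); the coin and the 30-day window are arbitrary examples.

package example

import (
	"context"
	"time"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
)

// recentDeposits lists BTC deposits from the last 30 days.
func recentDeposits(ctx context.Context, client *binanceapi.RestClient) ([]binanceapi.DepositHistory, error) {
	end := time.Now()
	start := end.AddDate(0, 0, -30)

	req := client.NewGetDepositHistoryRequest()
	req.Coin("BTC")
	req.StartTime(start)
	req.EndTime(end)
	return req.Do(ctx)
}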
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetDepositHistoryRequest) Coin(coin string) *GetDepositHistoryRequest { + g.coin = &coin + return g +} + +func (g *GetDepositHistoryRequest) StartTime(startTime time.Time) *GetDepositHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetDepositHistoryRequest) EndTime(endTime time.Time) *GetDepositHistoryRequest { + g.endTime = &endTime + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetDepositHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetDepositHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check coin field -> json key coin + if g.coin != nil { + coin := *g.coin + + // assign parameter of coin + params["coin"] = coin + } else { + } + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetDepositHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetDepositHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetDepositHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetDepositHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetDepositHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetDepositHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetDepositHistoryRequest) 
GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetDepositHistoryRequest) Do(ctx context.Context) ([]DepositHistory, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/capital/deposit/hisrec" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []DepositHistory + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_interest_history_request.go b/pkg/exchange/binance/binanceapi/get_margin_interest_history_request.go new file mode 100644 index 0000000000..59d241d1b2 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_interest_history_request.go @@ -0,0 +1,52 @@ +package binanceapi + +import ( + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +// interest type in response has 4 enums: +// PERIODIC interest charged per hour +// ON_BORROW first interest charged on borrow +// PERIODIC_CONVERTED interest charged per hour converted into BNB +// ON_BORROW_CONVERTED first interest charged on borrow converted into BNB +type InterestType string + +const ( + InterestTypePeriodic InterestType = "PERIODIC" + InterestTypeOnBorrow InterestType = "ON_BORROW" + InterestTypePeriodicConverted InterestType = "PERIODIC_CONVERTED" + InterestTypeOnBorrowConverted InterestType = "ON_BORROW_CONVERTED" +) + +// MarginInterest is the user margin interest record +type MarginInterest struct { + IsolatedSymbol string `json:"isolatedSymbol"` + Asset string `json:"asset"` + Interest fixedpoint.Value `json:"interest"` + InterestAccuredTime types.MillisecondTimestamp `json:"interestAccuredTime"` + InterestRate fixedpoint.Value `json:"interestRate"` + Principal fixedpoint.Value `json:"principal"` + Type InterestType `json:"type"` +} + +//go:generate requestgen -method GET -url "/sapi/v1/margin/interestHistory" -type GetMarginInterestHistoryRequest -responseType .RowsResponse -responseDataField Rows -responseDataType []MarginInterest +type GetMarginInterestHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + asset string `param:"asset"` + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + isolatedSymbol *string `param:"isolatedSymbol"` + archived *bool `param:"archived"` + size *int `param:"size"` + current *int `param:"current"` +} + +func (c *RestClient) NewGetMarginInterestHistoryRequest() *GetMarginInterestHistoryRequest { + return &GetMarginInterestHistoryRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_interest_history_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_margin_interest_history_request_requestgen.go new file mode 100644 index 0000000000..b73d167f81 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_interest_history_request_requestgen.go @@ -0,0 +1,234 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/margin/interestHistory -type GetMarginInterestHistoryRequest 
-responseType .RowsResponse -responseDataField Rows -responseDataType []MarginInterest"; DO NOT EDIT. + +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginInterestHistoryRequest) Asset(asset string) *GetMarginInterestHistoryRequest { + g.asset = asset + return g +} + +func (g *GetMarginInterestHistoryRequest) StartTime(startTime time.Time) *GetMarginInterestHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginInterestHistoryRequest) EndTime(endTime time.Time) *GetMarginInterestHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetMarginInterestHistoryRequest) IsolatedSymbol(isolatedSymbol string) *GetMarginInterestHistoryRequest { + g.isolatedSymbol = &isolatedSymbol + return g +} + +func (g *GetMarginInterestHistoryRequest) Archived(archived bool) *GetMarginInterestHistoryRequest { + g.archived = &archived + return g +} + +func (g *GetMarginInterestHistoryRequest) Size(size int) *GetMarginInterestHistoryRequest { + g.size = &size + return g +} + +func (g *GetMarginInterestHistoryRequest) Current(current int) *GetMarginInterestHistoryRequest { + g.current = ¤t + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginInterestHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginInterestHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check asset field -> json key asset + asset := g.asset + + // assign parameter of asset + params["asset"] = asset + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check isolatedSymbol field -> json key isolatedSymbol + if g.isolatedSymbol != nil { + isolatedSymbol := *g.isolatedSymbol + + // assign parameter of isolatedSymbol + params["isolatedSymbol"] = isolatedSymbol + } else { + } + // check archived field -> json key archived + if g.archived != nil { + archived := *g.archived + + // assign parameter of archived + params["archived"] = archived + } else { + } + // check size field -> json key size + if g.size != nil { + size := *g.size + + // assign parameter of size + params["size"] = size + } else { + } + // check current field -> json key current + if g.current != nil { + current := *g.current + + // assign parameter of current + params["current"] = current + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginInterestHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := 
range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginInterestHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginInterestHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginInterestHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginInterestHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginInterestHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginInterestHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginInterestHistoryRequest) Do(ctx context.Context) ([]MarginInterest, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/margin/interestHistory" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse RowsResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []MarginInterest + if err := json.Unmarshal(apiResponse.Rows, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_interest_history_request_test.go b/pkg/exchange/binance/binanceapi/get_margin_interest_history_request_test.go new file mode 100644 index 0000000000..60540c35c8 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_interest_history_request_test.go @@ -0,0 +1,29 @@ +package binanceapi + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func Test_GetMarginInterestHistoryRequest(t *testing.T) { + client := getTestClientOrSkip(t) + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) + + req := client.NewGetMarginInterestHistoryRequest() + req.Asset("USDT") + req.IsolatedSymbol("DOTUSDT") + req.StartTime(time.Date(2022, time.February, 1, 0, 0, 0, 0, time.UTC)) + req.EndTime(time.Date(2022, time.March, 1, 0, 0, 0, 0, time.UTC)) + req.Size(100) + + records, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotEmpty(t, records) + t.Logf("interest: %+v", records) +} diff --git 
a/pkg/exchange/binance/binanceapi/get_margin_interest_rate_history_request.go b/pkg/exchange/binance/binanceapi/get_margin_interest_rate_history_request.go new file mode 100644 index 0000000000..86d05dd72f --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_interest_rate_history_request.go @@ -0,0 +1,30 @@ +package binanceapi + +import ( + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type MarginInterestRate struct { + Asset string `json:"asset"` + DailyInterestRate fixedpoint.Value `json:"dailyInterestRate"` + Timestamp types.MillisecondTimestamp `json:"timestamp"` + VipLevel int `json:"vipLevel"` +} + +//go:generate requestgen -method GET -url "/sapi/v1/margin/interestRateHistory" -type GetMarginInterestRateHistoryRequest -responseType []MarginInterestRate +type GetMarginInterestRateHistoryRequest struct { + client requestgen.APIClient + + asset string `param:"asset"` + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` +} + +func (c *RestClient) NewGetMarginInterestRateHistoryRequest() *GetMarginInterestRateHistoryRequest { + return &GetMarginInterestRateHistoryRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_interest_rate_history_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_margin_interest_rate_history_request_requestgen.go new file mode 100644 index 0000000000..1f80665cc3 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_interest_rate_history_request_requestgen.go @@ -0,0 +1,178 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/margin/interestRateHistory -type GetMarginInterestRateHistoryRequest -responseType []MarginInterestRate"; DO NOT EDIT. 
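Note the client field here: GetMarginInterestRateHistoryRequest embeds requestgen.APIClient, so its generated Do builds a plain, unsigned request via NewRequest, whereas the other requests in this package embed requestgen.AuthenticatedAPIClient and go through NewAuthenticatedRequest. The same *RestClient satisfies both interfaces, so callers use them identically; a short sketch (not part of this change set), with an arbitrary asset:

package example

import (
	"context"
	"log"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
)

func queryMarginInterest(ctx context.Context, client *binanceapi.RestClient) error {
	// signed request: /sapi/v1/margin/interestHistory
	interests, err := client.NewGetMarginInterestHistoryRequest().Asset("USDT").Do(ctx)
	if err != nil {
		return err
	}

	// unsigned request: /sapi/v1/margin/interestRateHistory
	rates, err := client.NewGetMarginInterestRateHistoryRequest().Asset("USDT").Do(ctx)
	if err != nil {
		return err
	}

	log.Printf("%d interest records, %d rate records", len(interests), len(rates))
	return nil
}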
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginInterestRateHistoryRequest) Asset(asset string) *GetMarginInterestRateHistoryRequest { + g.asset = asset + return g +} + +func (g *GetMarginInterestRateHistoryRequest) StartTime(startTime time.Time) *GetMarginInterestRateHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginInterestRateHistoryRequest) EndTime(endTime time.Time) *GetMarginInterestRateHistoryRequest { + g.endTime = &endTime + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginInterestRateHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginInterestRateHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check asset field -> json key asset + asset := g.asset + + // assign parameter of asset + params["asset"] = asset + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginInterestRateHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginInterestRateHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginInterestRateHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginInterestRateHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginInterestRateHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginInterestRateHistoryRequest) isVarSlice(_v interface{}) bool { + rt := 
reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginInterestRateHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginInterestRateHistoryRequest) Do(ctx context.Context) ([]MarginInterestRate, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/margin/interestRateHistory" + + req, err := g.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []MarginInterestRate + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_liquidation_history_request.go b/pkg/exchange/binance/binanceapi/get_margin_liquidation_history_request.go new file mode 100644 index 0000000000..31ce6c73d7 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_liquidation_history_request.go @@ -0,0 +1,38 @@ +package binanceapi + +import ( + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type MarginLiquidationRecord struct { + AveragePrice fixedpoint.Value `json:"avgPrice"` + ExecutedQuantity fixedpoint.Value `json:"executedQty"` + OrderId uint64 `json:"orderId"` + Price fixedpoint.Value `json:"price"` + Quantity fixedpoint.Value `json:"qty"` + Side SideType `json:"side"` + Symbol string `json:"symbol"` + TimeInForce string `json:"timeInForce"` + IsIsolated bool `json:"isIsolated"` + UpdatedTime types.MillisecondTimestamp `json:"updatedTime"` +} + +//go:generate requestgen -method GET -url "/sapi/v1/margin/forceLiquidationRec" -type GetMarginLiquidationHistoryRequest -responseType .RowsResponse -responseDataField Rows -responseDataType []MarginLiquidationRecord +type GetMarginLiquidationHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + isolatedSymbol *string `param:"isolatedSymbol"` + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + size *int `param:"size"` + current *int `param:"current"` +} + +func (c *RestClient) NewGetMarginLiquidationHistoryRequest() *GetMarginLiquidationHistoryRequest { + return &GetMarginLiquidationHistoryRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_liquidation_history_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_margin_liquidation_history_request_requestgen.go new file mode 100644 index 0000000000..942491998a --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_liquidation_history_request_requestgen.go @@ -0,0 +1,211 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/margin/forceLiquidationRec -type GetMarginLiquidationHistoryRequest -responseType .RowsResponse -responseDataField Rows -responseDataType []MarginLiquidationRecord"; DO NOT EDIT. 
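Because this request is generated with -responseDataField Rows, its Do method first decodes the RowsResponse envelope (defined elsewhere in this package) and then unmarshals the Rows payload into []MarginLiquidationRecord. A hedged paging sketch (not part of this change set), assuming size is the page length and current the 1-based page index, as with Binance's other paged /sapi endpoints:

package example

import (
	"context"
	"time"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
)

// allLiquidations collects liquidation records page by page until a short page is returned.
func allLiquidations(ctx context.Context, client *binanceapi.RestClient, start, end time.Time) ([]binanceapi.MarginLiquidationRecord, error) {
	const size = 100

	var all []binanceapi.MarginLiquidationRecord
	for page := 1; ; page++ {
		req := client.NewGetMarginLiquidationHistoryRequest()
		req.StartTime(start).EndTime(end).Size(size).Current(page)

		records, err := req.Do(ctx)
		if err != nil {
			return nil, err
		}

		all = append(all, records...)
		if len(records) < size {
			return all, nil
		}
	}
}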
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginLiquidationHistoryRequest) IsolatedSymbol(isolatedSymbol string) *GetMarginLiquidationHistoryRequest { + g.isolatedSymbol = &isolatedSymbol + return g +} + +func (g *GetMarginLiquidationHistoryRequest) StartTime(startTime time.Time) *GetMarginLiquidationHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginLiquidationHistoryRequest) EndTime(endTime time.Time) *GetMarginLiquidationHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetMarginLiquidationHistoryRequest) Size(size int) *GetMarginLiquidationHistoryRequest { + g.size = &size + return g +} + +func (g *GetMarginLiquidationHistoryRequest) Current(current int) *GetMarginLiquidationHistoryRequest { + g.current = ¤t + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginLiquidationHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginLiquidationHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check isolatedSymbol field -> json key isolatedSymbol + if g.isolatedSymbol != nil { + isolatedSymbol := *g.isolatedSymbol + + // assign parameter of isolatedSymbol + params["isolatedSymbol"] = isolatedSymbol + } else { + } + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check size field -> json key size + if g.size != nil { + size := *g.size + + // assign parameter of size + params["size"] = size + } else { + } + // check current field -> json key current + if g.current != nil { + current := *g.current + + // assign parameter of current + params["current"] = current + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginLiquidationHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginLiquidationHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginLiquidationHistoryRequest) 
GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginLiquidationHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginLiquidationHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginLiquidationHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginLiquidationHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginLiquidationHistoryRequest) Do(ctx context.Context) ([]MarginLiquidationRecord, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/margin/forceLiquidationRec" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse RowsResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []MarginLiquidationRecord + if err := json.Unmarshal(apiResponse.Rows, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_loan_history_request.go b/pkg/exchange/binance/binanceapi/get_margin_loan_history_request.go new file mode 100644 index 0000000000..e7a801a9dd --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_loan_history_request.go @@ -0,0 +1,54 @@ +package binanceapi + +import ( + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +// one of PENDING (pending execution), CONFIRMED (successfully loaned), FAILED (execution failed, nothing happened to your account); +type LoanStatus string + +const ( + LoanStatusPending LoanStatus = "PENDING" + LoanStatusConfirmed LoanStatus = "CONFIRMED" + LoanStatusFailed LoanStatus = "FAILED" +) + +type MarginLoanRecord struct { + IsolatedSymbol string `json:"isolatedSymbol"` + TxId int64 `json:"txId"` + Asset string `json:"asset"` + Principal fixedpoint.Value `json:"principal"` + Timestamp types.MillisecondTimestamp `json:"timestamp"` + Status LoanStatus `json:"status"` +} + +// GetMarginLoanHistoryRequest +// +// txId or startTime must be sent. txId takes precedence. +// Response in descending order +// If isolatedSymbol is not sent, crossed margin data will be returned +// The max interval between startTime and endTime is 30 days. 
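// A minimal usage sketch for the generated GetMarginLiquidationHistoryRequest
// above (GET /sapi/v1/margin/forceLiquidationRec). It assumes the package's
// usual NewGetMarginLiquidationHistoryRequest constructor on *RestClient and a
// client that is already authenticated; the examples package and helper name
// below are illustrative only.
package examples

import (
	"context"
	"log"
	"time"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
)

func queryMarginLiquidationHistory(ctx context.Context, client *binanceapi.RestClient) error {
	req := client.NewGetMarginLiquidationHistoryRequest()
	req.IsolatedSymbol("DOTUSDT") // omit to query cross-margin records instead
	req.StartTime(time.Now().Add(-7 * 24 * time.Hour))
	req.EndTime(time.Now())
	req.Size(100)

	// Do decodes the "rows" field of the RowsResponse into []MarginLiquidationRecord.
	records, err := req.Do(ctx)
	if err != nil {
		return err
	}

	for _, record := range records {
		log.Printf("forced liquidation record: %+v", record)
	}
	return nil
}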
+// If startTime and endTime not sent, return records of the last 7 days by default +// Set archived to true to query data from 6 months ago +// +//go:generate requestgen -method GET -url "/sapi/v1/margin/loan" -type GetMarginLoanHistoryRequest -responseType .RowsResponse -responseDataField Rows -responseDataType []MarginLoanRecord +type GetMarginLoanHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + asset string `param:"asset"` + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + isolatedSymbol *string `param:"isolatedSymbol"` + archived *bool `param:"archived"` + size *int `param:"size"` + current *int `param:"current"` +} + +func (c *RestClient) NewGetMarginLoanHistoryRequest() *GetMarginLoanHistoryRequest { + return &GetMarginLoanHistoryRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_loan_history_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_margin_loan_history_request_requestgen.go new file mode 100644 index 0000000000..d893d55f57 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_loan_history_request_requestgen.go @@ -0,0 +1,234 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/margin/loan -type GetMarginLoanHistoryRequest -responseType .RowsResponse -responseDataField Rows -responseDataType []MarginLoanRecord"; DO NOT EDIT. + +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginLoanHistoryRequest) Asset(asset string) *GetMarginLoanHistoryRequest { + g.asset = asset + return g +} + +func (g *GetMarginLoanHistoryRequest) StartTime(startTime time.Time) *GetMarginLoanHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginLoanHistoryRequest) EndTime(endTime time.Time) *GetMarginLoanHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetMarginLoanHistoryRequest) IsolatedSymbol(isolatedSymbol string) *GetMarginLoanHistoryRequest { + g.isolatedSymbol = &isolatedSymbol + return g +} + +func (g *GetMarginLoanHistoryRequest) Archived(archived bool) *GetMarginLoanHistoryRequest { + g.archived = &archived + return g +} + +func (g *GetMarginLoanHistoryRequest) Size(size int) *GetMarginLoanHistoryRequest { + g.size = &size + return g +} + +func (g *GetMarginLoanHistoryRequest) Current(current int) *GetMarginLoanHistoryRequest { + g.current = ¤t + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginLoanHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginLoanHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check asset field -> json key asset + asset := g.asset + + // assign parameter of asset + params["asset"] = asset + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign 
parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check isolatedSymbol field -> json key isolatedSymbol + if g.isolatedSymbol != nil { + isolatedSymbol := *g.isolatedSymbol + + // assign parameter of isolatedSymbol + params["isolatedSymbol"] = isolatedSymbol + } else { + } + // check archived field -> json key archived + if g.archived != nil { + archived := *g.archived + + // assign parameter of archived + params["archived"] = archived + } else { + } + // check size field -> json key size + if g.size != nil { + size := *g.size + + // assign parameter of size + params["size"] = size + } else { + } + // check current field -> json key current + if g.current != nil { + current := *g.current + + // assign parameter of current + params["current"] = current + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginLoanHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginLoanHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginLoanHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginLoanHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginLoanHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginLoanHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginLoanHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginLoanHistoryRequest) Do(ctx context.Context) ([]MarginLoanRecord, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/margin/loan" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse RowsResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data 
[]MarginLoanRecord + if err := json.Unmarshal(apiResponse.Rows, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_loan_history_request_test.go b/pkg/exchange/binance/binanceapi/get_margin_loan_history_request_test.go new file mode 100644 index 0000000000..c9daa028f1 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_loan_history_request_test.go @@ -0,0 +1,29 @@ +package binanceapi + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func Test_GetMarginLoanHistoryRequest(t *testing.T) { + client := getTestClientOrSkip(t) + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) + + req := client.NewGetMarginLoanHistoryRequest() + req.Asset("USDT") + req.IsolatedSymbol("DOTUSDT") + req.StartTime(time.Date(2022, time.February, 1, 0, 0, 0, 0, time.UTC)) + req.EndTime(time.Date(2022, time.March, 1, 0, 0, 0, 0, time.UTC)) + req.Size(100) + + records, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotEmpty(t, records) + t.Logf("loans: %+v", records) +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_repay_history_request.go b/pkg/exchange/binance/binanceapi/get_margin_repay_history_request.go new file mode 100644 index 0000000000..6d9a13448b --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_repay_history_request.go @@ -0,0 +1,47 @@ +package binanceapi + +import ( + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +// RepayStatus one of PENDING (pending execution), CONFIRMED (successfully loaned), FAILED (execution failed, nothing happened to your account); +type RepayStatus string + +const ( + RepayStatusPending LoanStatus = "PENDING" + RepayStatusConfirmed LoanStatus = "CONFIRMED" + RepayStatusFailed LoanStatus = "FAILED" +) + +type MarginRepayRecord struct { + IsolatedSymbol string `json:"isolatedSymbol"` + Amount fixedpoint.Value `json:"amount"` + Asset string `json:"asset"` + Interest fixedpoint.Value `json:"interest"` + Principal fixedpoint.Value `json:"principal"` + Status string `json:"status"` + Timestamp types.MillisecondTimestamp `json:"timestamp"` + TxId uint64 `json:"txId"` +} + +//go:generate requestgen -method GET -url "/sapi/v1/margin/repay" -type GetMarginRepayHistoryRequest -responseType .RowsResponse -responseDataField Rows -responseDataType []MarginRepayRecord +type GetMarginRepayHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + asset string `param:"asset"` + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + isolatedSymbol *string `param:"isolatedSymbol"` + archived *bool `param:"archived"` + size *int `param:"size"` + current *int `param:"current"` +} + +func (c *RestClient) NewGetMarginRepayHistoryRequest() *GetMarginRepayHistoryRequest { + return &GetMarginRepayHistoryRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_repay_history_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_margin_repay_history_request_requestgen.go new file mode 100644 index 0000000000..17e5364155 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_repay_history_request_requestgen.go @@ -0,0 +1,234 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/margin/repay -type GetMarginRepayHistoryRequest -responseType .RowsResponse -responseDataField Rows -responseDataType []MarginRepayRecord"; DO NOT EDIT. 
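// A sketch that combines the new margin loan and repay history requests
// (GET /sapi/v1/margin/loan and GET /sapi/v1/margin/repay) to estimate the
// principal still outstanding for one asset over the last 30 days, the maximum
// interval both endpoints accept. It assumes an authenticated *RestClient and
// the fixedpoint.Value Add/Sub/Zero helpers used elsewhere in bbgo; the
// examples package and helper name are illustrative only.
package examples

import (
	"context"
	"log"
	"time"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
	"github.com/c9s/bbgo/pkg/fixedpoint"
)

func outstandingMarginPrincipal(ctx context.Context, client *binanceapi.RestClient, asset string) (fixedpoint.Value, error) {
	endTime := time.Now()
	startTime := endTime.Add(-30 * 24 * time.Hour)

	loans, err := client.NewGetMarginLoanHistoryRequest().
		Asset(asset).
		StartTime(startTime).
		EndTime(endTime).
		Size(100).
		Do(ctx)
	if err != nil {
		return fixedpoint.Zero, err
	}

	repays, err := client.NewGetMarginRepayHistoryRequest().
		Asset(asset).
		StartTime(startTime).
		EndTime(endTime).
		Size(100).
		Do(ctx)
	if err != nil {
		return fixedpoint.Zero, err
	}

	outstanding := fixedpoint.Zero
	for _, loan := range loans {
		// only count loans that were actually executed
		if loan.Status == binanceapi.LoanStatusConfirmed {
			outstanding = outstanding.Add(loan.Principal)
		}
	}
	for _, repay := range repays {
		// MarginRepayRecord.Status is a plain string in the response
		if repay.Status == "CONFIRMED" {
			outstanding = outstanding.Sub(repay.Principal)
		}
	}

	log.Printf("outstanding %s margin principal over the last 30 days: %v", asset, outstanding)
	return outstanding, nil
}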
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginRepayHistoryRequest) Asset(asset string) *GetMarginRepayHistoryRequest { + g.asset = asset + return g +} + +func (g *GetMarginRepayHistoryRequest) StartTime(startTime time.Time) *GetMarginRepayHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginRepayHistoryRequest) EndTime(endTime time.Time) *GetMarginRepayHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetMarginRepayHistoryRequest) IsolatedSymbol(isolatedSymbol string) *GetMarginRepayHistoryRequest { + g.isolatedSymbol = &isolatedSymbol + return g +} + +func (g *GetMarginRepayHistoryRequest) Archived(archived bool) *GetMarginRepayHistoryRequest { + g.archived = &archived + return g +} + +func (g *GetMarginRepayHistoryRequest) Size(size int) *GetMarginRepayHistoryRequest { + g.size = &size + return g +} + +func (g *GetMarginRepayHistoryRequest) Current(current int) *GetMarginRepayHistoryRequest { + g.current = ¤t + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginRepayHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginRepayHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check asset field -> json key asset + asset := g.asset + + // assign parameter of asset + params["asset"] = asset + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check isolatedSymbol field -> json key isolatedSymbol + if g.isolatedSymbol != nil { + isolatedSymbol := *g.isolatedSymbol + + // assign parameter of isolatedSymbol + params["isolatedSymbol"] = isolatedSymbol + } else { + } + // check archived field -> json key archived + if g.archived != nil { + archived := *g.archived + + // assign parameter of archived + params["archived"] = archived + } else { + } + // check size field -> json key size + if g.size != nil { + size := *g.size + + // assign parameter of size + params["size"] = size + } else { + } + // check current field -> json key current + if g.current != nil { + current := *g.current + + // assign parameter of current + params["current"] = current + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginRepayHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + 
query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginRepayHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginRepayHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginRepayHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginRepayHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginRepayHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginRepayHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginRepayHistoryRequest) Do(ctx context.Context) ([]MarginRepayRecord, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/margin/repay" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse RowsResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []MarginRepayRecord + if err := json.Unmarshal(apiResponse.Rows, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_margin_repay_history_request_test.go b/pkg/exchange/binance/binanceapi/get_margin_repay_history_request_test.go new file mode 100644 index 0000000000..5161d32ff1 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_margin_repay_history_request_test.go @@ -0,0 +1,29 @@ +package binanceapi + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func Test_GetMarginRepayHistoryRequest(t *testing.T) { + client := getTestClientOrSkip(t) + ctx := context.Background() + + err := client.SetTimeOffsetFromServer(ctx) + assert.NoError(t, err) + + req := client.NewGetMarginRepayHistoryRequest() + req.Asset("USDT") + req.IsolatedSymbol("DOTUSDT") + req.StartTime(time.Date(2022, time.February, 1, 0, 0, 0, 0, time.UTC)) + req.EndTime(time.Date(2022, time.March, 1, 0, 0, 0, 0, time.UTC)) + req.Size(100) + + records, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotEmpty(t, records) + t.Logf("loans: %+v", records) +} diff --git a/pkg/exchange/binance/binanceapi/get_spot_rebate_history_request.go b/pkg/exchange/binance/binanceapi/get_spot_rebate_history_request.go new file mode 100644 index 0000000000..7fff74ffbe --- /dev/null +++ 
b/pkg/exchange/binance/binanceapi/get_spot_rebate_history_request.go @@ -0,0 +1,41 @@ +package binanceapi + +import ( + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +// rebate type:1 is commission rebate,2 is referral kickback +type RebateType int + +const ( + RebateTypeCommission = 1 + RebateTypeReferralKickback = 2 +) + +type SpotRebate struct { + Asset string `json:"asset"` + Type RebateType `json:"type"` + Amount fixedpoint.Value `json:"amount"` + UpdateTime types.MillisecondTimestamp `json:"updateTime"` +} + +// GetSpotRebateHistoryRequest +// The max interval between startTime and endTime is 30 days. +// If startTime and endTime are not sent, the recent 7 days' data will be returned. +// The earliest startTime is supported on June 10, 2020 +//go:generate requestgen -method GET -url "/sapi/v1/rebate/taxQuery" -type GetSpotRebateHistoryRequest -responseType PagedDataResponse -responseDataField Data.Data -responseDataType []SpotRebate +type GetSpotRebateHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` +} + +func (c *RestClient) NewGetSpotRebateHistoryRequest() *GetSpotRebateHistoryRequest { + return &GetSpotRebateHistoryRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_spot_rebate_history_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_spot_rebate_history_request_requestgen.go new file mode 100644 index 0000000000..05cc5b67e6 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_spot_rebate_history_request_requestgen.go @@ -0,0 +1,172 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/rebate/taxQuery -type GetSpotRebateHistoryRequest -responseType PagedDataResponse -responseDataField Data.Data -responseDataType []SpotRebate"; DO NOT EDIT. 
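// A short sketch for the new spot rebate history request
// (GET /sapi/v1/rebate/taxQuery) defined above. When startTime and endTime are
// omitted Binance returns roughly the last 7 days, and the interval may not
// exceed 30 days; the client is assumed to be authenticated already, and the
// examples package and helper name are illustrative only.
package examples

import (
	"context"
	"log"
	"time"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
)

func querySpotRebates(ctx context.Context, client *binanceapi.RestClient) error {
	req := client.NewGetSpotRebateHistoryRequest()
	req.StartTime(time.Now().Add(-7 * 24 * time.Hour))
	req.EndTime(time.Now())

	// Do decodes PagedDataResponse.Data.Data into []SpotRebate.
	rebates, err := req.Do(ctx)
	if err != nil {
		return err
	}

	for _, rebate := range rebates {
		kind := "commission rebate"
		if rebate.Type == binanceapi.RebateTypeReferralKickback {
			kind = "referral kickback"
		}
		log.Printf("%s: %v %s (updated at %v)", kind, rebate.Amount, rebate.Asset, rebate.UpdateTime)
	}
	return nil
}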
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetSpotRebateHistoryRequest) StartTime(startTime time.Time) *GetSpotRebateHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetSpotRebateHistoryRequest) EndTime(endTime time.Time) *GetSpotRebateHistoryRequest { + g.endTime = &endTime + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetSpotRebateHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetSpotRebateHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetSpotRebateHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetSpotRebateHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetSpotRebateHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetSpotRebateHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetSpotRebateHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetSpotRebateHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetSpotRebateHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + 
} + + return slugs, nil +} + +func (g *GetSpotRebateHistoryRequest) Do(ctx context.Context) ([]SpotRebate, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/rebate/taxQuery" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse PagedDataResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []SpotRebate + if err := json.Unmarshal(apiResponse.Data.Data, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_trade_fee_request.go b/pkg/exchange/binance/binanceapi/get_trade_fee_request.go new file mode 100644 index 0000000000..0b6c544062 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_trade_fee_request.go @@ -0,0 +1,22 @@ +package binanceapi + +import ( + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type TradeFee struct { + Symbol string `json:"symbol"` + MakerCommission fixedpoint.Value `json:"makerCommission"` + TakerCommission fixedpoint.Value `json:"takerCommission"` +} + +//go:generate requestgen -method GET -url "/sapi/v1/asset/tradeFee" -type GetTradeFeeRequest -responseType []TradeFee +type GetTradeFeeRequest struct { + client requestgen.AuthenticatedAPIClient +} + +func (c *RestClient) NewGetTradeFeeRequest() *GetTradeFeeRequest { + return &GetTradeFeeRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_trade_fee_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_trade_fee_request_requestgen.go new file mode 100644 index 0000000000..77aac0c9e0 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_trade_fee_request_requestgen.go @@ -0,0 +1,135 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/asset/tradeFee -type GetTradeFeeRequest -responseType []TradeFee"; DO NOT EDIT. 
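// A sketch for the new trade fee request (GET /sapi/v1/asset/tradeFee), which
// returns the maker/taker commission of every symbol; the slice is turned into
// a per-symbol lookup map here. The client is assumed to be authenticated
// already, and the examples package and helper name are illustrative only.
package examples

import (
	"context"
	"fmt"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
)

func tradeFeeBySymbol(ctx context.Context, client *binanceapi.RestClient) (map[string]binanceapi.TradeFee, error) {
	fees, err := client.NewGetTradeFeeRequest().Do(ctx)
	if err != nil {
		return nil, err
	}

	feeMap := make(map[string]binanceapi.TradeFee, len(fees))
	for _, fee := range fees {
		feeMap[fee.Symbol] = fee
	}

	if fee, ok := feeMap["BTCUSDT"]; ok {
		fmt.Printf("BTCUSDT maker %v / taker %v\n", fee.MakerCommission, fee.TakerCommission)
	}
	return feeMap, nil
}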
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetTradeFeeRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetTradeFeeRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetTradeFeeRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetTradeFeeRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetTradeFeeRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetTradeFeeRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetTradeFeeRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetTradeFeeRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetTradeFeeRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetTradeFeeRequest) Do(ctx context.Context) ([]TradeFee, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/sapi/v1/asset/tradeFee" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []TradeFee + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/binance/binanceapi/get_withdraw_history_request.go b/pkg/exchange/binance/binanceapi/get_withdraw_history_request.go new file mode 100644 index 0000000000..4e84dbc249 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_withdraw_history_request.go @@ -0,0 +1,67 @@ +package binanceapi + +import ( + "time" + + "github.com/c9s/requestgen" + + 
"github.com/c9s/bbgo/pkg/fixedpoint" +) + +//go:generate stringer -type=TransferType +// 1 for internal transfer, 0 for external transfer +type TransferType int + +const ( + TransferTypeInternal TransferType = 0 + TransferTypeExternal TransferType = 0 +) + +type WithdrawRecord struct { + Id string `json:"id"` + Address string `json:"address"` + Amount fixedpoint.Value `json:"amount"` + ApplyTime string `json:"applyTime"` + Coin string `json:"coin"` + WithdrawOrderID string `json:"withdrawOrderId"` + Network string `json:"network"` + TransferType TransferType `json:"transferType"` + Status WithdrawStatus `json:"status"` + TransactionFee fixedpoint.Value `json:"transactionFee"` + ConfirmNo int `json:"confirmNo"` + Info string `json:"info"` + TxID string `json:"txId"` +} + +//go:generate stringer -type=WithdrawStatus +type WithdrawStatus int + +// WithdrawStatus: 0(0:Email Sent,1:Cancelled 2:Awaiting Approval 3:Rejected 4:Processing 5:Failure 6:Completed) +const ( + WithdrawStatusEmailSent WithdrawStatus = iota + WithdrawStatusCancelled + WithdrawStatusAwaitingApproval + WithdrawStatusRejected + WithdrawStatusProcessing + WithdrawStatusFailure + WithdrawStatusCompleted +) + +//go:generate requestgen -method GET -url "/sapi/v1/capital/withdraw/history" -type GetWithdrawHistoryRequest -responseType []WithdrawRecord +type GetWithdrawHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + coin string `param:"coin"` + + withdrawOrderId *string `param:"withdrawOrderId"` + + status *WithdrawStatus `param:"status"` + + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + limit *uint64 `param:"limit"` + offset *uint64 `param:"offset"` +} + +func (c *RestClient) NewGetWithdrawHistoryRequest() *GetWithdrawHistoryRequest { + return &GetWithdrawHistoryRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/get_withdraw_history_request_requestgen.go b/pkg/exchange/binance/binanceapi/get_withdraw_history_request_requestgen.go new file mode 100644 index 0000000000..74717d3c44 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/get_withdraw_history_request_requestgen.go @@ -0,0 +1,241 @@ +// Code generated by "requestgen -method GET -url /sapi/v1/capital/withdraw/history -type GetWithdrawHistoryRequest -responseType []WithdrawRecord"; DO NOT EDIT. 
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetWithdrawHistoryRequest) Coin(coin string) *GetWithdrawHistoryRequest { + g.coin = coin + return g +} + +func (g *GetWithdrawHistoryRequest) WithdrawOrderId(withdrawOrderId string) *GetWithdrawHistoryRequest { + g.withdrawOrderId = &withdrawOrderId + return g +} + +func (g *GetWithdrawHistoryRequest) Status(status WithdrawStatus) *GetWithdrawHistoryRequest { + g.status = &status + return g +} + +func (g *GetWithdrawHistoryRequest) StartTime(startTime time.Time) *GetWithdrawHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetWithdrawHistoryRequest) EndTime(endTime time.Time) *GetWithdrawHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetWithdrawHistoryRequest) Limit(limit uint64) *GetWithdrawHistoryRequest { + g.limit = &limit + return g +} + +func (g *GetWithdrawHistoryRequest) Offset(offset uint64) *GetWithdrawHistoryRequest { + g.offset = &offset + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetWithdrawHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetWithdrawHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check coin field -> json key coin + coin := g.coin + + // assign parameter of coin + params["coin"] = coin + // check withdrawOrderId field -> json key withdrawOrderId + if g.withdrawOrderId != nil { + withdrawOrderId := *g.withdrawOrderId + + // assign parameter of withdrawOrderId + params["withdrawOrderId"] = withdrawOrderId + } else { + } + // check status field -> json key status + if g.status != nil { + status := *g.status + + // TEMPLATE check-valid-values + switch status { + case WithdrawStatusEmailSent: + params["status"] = status + + default: + return nil, fmt.Errorf("status value %v is invalid", status) + + } + // END TEMPLATE check-valid-values + + // assign parameter of status + params["status"] = status + } else { + } + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + // check offset field -> json key offset + if g.offset != nil { + offset := *g.offset + + // assign parameter of offset + params["offset"] = offset + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetWithdrawHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err 
+ } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetWithdrawHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetWithdrawHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetWithdrawHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetWithdrawHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetWithdrawHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetWithdrawHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetWithdrawHistoryRequest) Do(ctx context.Context) ([]WithdrawRecord, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/sapi/v1/capital/withdraw/history" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []WithdrawRecord + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/binance/binanceapi/page.go b/pkg/exchange/binance/binanceapi/page.go new file mode 100644 index 0000000000..1daec6472e --- /dev/null +++ b/pkg/exchange/binance/binanceapi/page.go @@ -0,0 +1,15 @@ +package binanceapi + +import "encoding/json" + +type PagedDataResponse struct { + Status string `json:"status"` + Type string `json:"type"` + Code string `json:"code"` + Data struct { + Page int `json:"page"` + TotalRecords int `json:"totalRecords"` + TotalPageNum int `json:"totalPageNum"` + Data json.RawMessage `json:"data"` + } `json:"data"` +} diff --git a/pkg/exchange/binance/binanceapi/rows.go b/pkg/exchange/binance/binanceapi/rows.go new file mode 100644 index 0000000000..60398419a7 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/rows.go @@ -0,0 +1,8 @@ +package binanceapi + +import "encoding/json" + +type RowsResponse struct { + Rows json.RawMessage `json:"rows"` + Total int `json:"total"` +} diff --git a/pkg/exchange/binance/binanceapi/transfertype_string.go b/pkg/exchange/binance/binanceapi/transfertype_string.go new file mode 100644 index 0000000000..8fad40b79b --- /dev/null +++ b/pkg/exchange/binance/binanceapi/transfertype_string.go @@ 
-0,0 +1,24 @@ +// Code generated by "stringer -type=TransferType"; DO NOT EDIT. + +package binanceapi + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[TransferTypeInternal-0] + _ = x[TransferTypeExternal-0] +} + +const _TransferType_name = "TransferTypeInternal" + +var _TransferType_index = [...]uint8{0, 20} + +func (i TransferType) String() string { + if i < 0 || i >= TransferType(len(_TransferType_index)-1) { + return "TransferType(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _TransferType_name[_TransferType_index[i]:_TransferType_index[i+1]] +} diff --git a/pkg/exchange/binance/binanceapi/withdraw_request.go b/pkg/exchange/binance/binanceapi/withdraw_request.go new file mode 100644 index 0000000000..50388292b8 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/withdraw_request.go @@ -0,0 +1,41 @@ +package binanceapi + +import "github.com/c9s/requestgen" + +type WalletType int + +const ( + WalletTypeSpot WalletType = 0 + WalletTypeFunding WalletType = 1 +) + +type WithdrawResponse struct { + ID string `json:"id"` +} + +//go:generate requestgen -method POST -url "/sapi/v1/capital/withdraw/apply" -type WithdrawRequest -responseType .WithdrawResponse +type WithdrawRequest struct { + client requestgen.AuthenticatedAPIClient + coin string `param:"coin"` + network *string `param:"network"` + + address string `param:"address"` + addressTag *string `param:"addressTag"` + + // amount is a decimal in string format + amount string `param:"amount"` + + withdrawOrderId *string `param:"withdrawOrderId"` + + transactionFeeFlag *bool `param:"transactionFeeFlag"` + + // name is the address name + name *string `param:"name"` + + // The wallet type for withdraw: 0-spot wallet ,1-funding wallet.Default spot wallet + walletType *WalletType `param:"walletType"` +} + +func (c *RestClient) NewWithdrawRequest() *WithdrawRequest { + return &WithdrawRequest{client: c} +} diff --git a/pkg/exchange/binance/binanceapi/withdraw_request_requestgen.go b/pkg/exchange/binance/binanceapi/withdraw_request_requestgen.go new file mode 100644 index 0000000000..557041c2e7 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/withdraw_request_requestgen.go @@ -0,0 +1,256 @@ +// Code generated by "requestgen -method POST -url /sapi/v1/capital/withdraw/apply -type WithdrawRequest -responseType .WithdrawResponse"; DO NOT EDIT. 
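// Two sketches for the new withdraw endpoints: listing withdrawals via
// GET /sapi/v1/capital/withdraw/history and submitting one via
// POST /sapi/v1/capital/withdraw/apply. The client is assumed to be
// authenticated already; the coin, network, address and amount values are
// placeholders, and the examples package and helper names are illustrative only.
package examples

import (
	"context"
	"log"
	"time"

	"github.com/c9s/bbgo/pkg/exchange/binance/binanceapi"
)

func completedWithdrawals(ctx context.Context, client *binanceapi.RestClient, coin string) ([]binanceapi.WithdrawRecord, error) {
	req := client.NewGetWithdrawHistoryRequest()
	req.Coin(coin)
	req.StartTime(time.Now().Add(-30 * 24 * time.Hour))
	req.EndTime(time.Now())
	req.Limit(1000)

	records, err := req.Do(ctx)
	if err != nil {
		return nil, err
	}

	// filter client-side with the WithdrawStatus constants declared above
	var completed []binanceapi.WithdrawRecord
	for _, record := range records {
		if record.Status == binanceapi.WithdrawStatusCompleted {
			completed = append(completed, record)
		}
	}

	log.Printf("%d of %d %s withdrawals are completed", len(completed), len(records), coin)
	return completed, nil
}

func submitWithdraw(ctx context.Context, client *binanceapi.RestClient) error {
	req := client.NewWithdrawRequest()
	req.Coin("USDT")
	req.Network("TRX") // check the coin's supported networks before relying on this value
	req.Address("your-withdraw-address")
	req.Amount("25.0") // amount is a decimal carried as a string
	req.WalletType(binanceapi.WalletTypeSpot)

	resp, err := req.Do(ctx)
	if err != nil {
		return err
	}

	log.Printf("withdraw submitted, id: %s", resp.ID)
	return nil
}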
+ +package binanceapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (w *WithdrawRequest) Coin(coin string) *WithdrawRequest { + w.coin = coin + return w +} + +func (w *WithdrawRequest) Network(network string) *WithdrawRequest { + w.network = &network + return w +} + +func (w *WithdrawRequest) Address(address string) *WithdrawRequest { + w.address = address + return w +} + +func (w *WithdrawRequest) AddressTag(addressTag string) *WithdrawRequest { + w.addressTag = &addressTag + return w +} + +func (w *WithdrawRequest) Amount(amount string) *WithdrawRequest { + w.amount = amount + return w +} + +func (w *WithdrawRequest) WithdrawOrderId(withdrawOrderId string) *WithdrawRequest { + w.withdrawOrderId = &withdrawOrderId + return w +} + +func (w *WithdrawRequest) TransactionFeeFlag(transactionFeeFlag bool) *WithdrawRequest { + w.transactionFeeFlag = &transactionFeeFlag + return w +} + +func (w *WithdrawRequest) Name(name string) *WithdrawRequest { + w.name = &name + return w +} + +func (w *WithdrawRequest) WalletType(walletType WalletType) *WithdrawRequest { + w.walletType = &walletType + return w +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (w *WithdrawRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (w *WithdrawRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check coin field -> json key coin + coin := w.coin + + // assign parameter of coin + params["coin"] = coin + // check network field -> json key network + if w.network != nil { + network := *w.network + + // assign parameter of network + params["network"] = network + } else { + } + // check address field -> json key address + address := w.address + + // assign parameter of address + params["address"] = address + // check addressTag field -> json key addressTag + if w.addressTag != nil { + addressTag := *w.addressTag + + // assign parameter of addressTag + params["addressTag"] = addressTag + } else { + } + // check amount field -> json key amount + amount := w.amount + + // assign parameter of amount + params["amount"] = amount + // check withdrawOrderId field -> json key withdrawOrderId + if w.withdrawOrderId != nil { + withdrawOrderId := *w.withdrawOrderId + + // assign parameter of withdrawOrderId + params["withdrawOrderId"] = withdrawOrderId + } else { + } + // check transactionFeeFlag field -> json key transactionFeeFlag + if w.transactionFeeFlag != nil { + transactionFeeFlag := *w.transactionFeeFlag + + // assign parameter of transactionFeeFlag + params["transactionFeeFlag"] = transactionFeeFlag + } else { + } + // check name field -> json key name + if w.name != nil { + name := *w.name + + // assign parameter of name + params["name"] = name + } else { + } + // check walletType field -> json key walletType + if w.walletType != nil { + walletType := *w.walletType + + // TEMPLATE check-valid-values + switch walletType { + case WalletTypeSpot, WalletTypeFunding: + params["walletType"] = walletType + + default: + return nil, fmt.Errorf("walletType value %v is invalid", walletType) + + } + // END TEMPLATE check-valid-values + + // assign parameter of walletType + params["walletType"] = walletType + } else { + } + + return 
params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (w *WithdrawRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := w.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if w.isVarSlice(_v) { + w.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (w *WithdrawRequest) GetParametersJSON() ([]byte, error) { + params, err := w.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (w *WithdrawRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (w *WithdrawRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (w *WithdrawRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (w *WithdrawRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (w *WithdrawRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := w.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (w *WithdrawRequest) Do(ctx context.Context) (*WithdrawResponse, error) { + + params, err := w.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/sapi/v1/capital/withdraw/apply" + + req, err := w.client.NewAuthenticatedRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := w.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse WithdrawResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/binance/binanceapi/withdrawstatus_string.go b/pkg/exchange/binance/binanceapi/withdrawstatus_string.go new file mode 100644 index 0000000000..7c972b7fd5 --- /dev/null +++ b/pkg/exchange/binance/binanceapi/withdrawstatus_string.go @@ -0,0 +1,29 @@ +// Code generated by "stringer -type=WithdrawStatus"; DO NOT EDIT. + +package binanceapi + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[WithdrawStatusEmailSent-0] + _ = x[WithdrawStatusCancelled-1] + _ = x[WithdrawStatusAwaitingApproval-2] + _ = x[WithdrawStatusRejected-3] + _ = x[WithdrawStatusProcessing-4] + _ = x[WithdrawStatusFailure-5] + _ = x[WithdrawStatusCompleted-6] +} + +const _WithdrawStatus_name = "WithdrawStatusEmailSentWithdrawStatusCancelledWithdrawStatusAwaitingApprovalWithdrawStatusRejectedWithdrawStatusProcessingWithdrawStatusFailureWithdrawStatusCompleted" + +var _WithdrawStatus_index = [...]uint8{0, 23, 46, 76, 98, 122, 143, 166} + +func (i WithdrawStatus) String() string { + if i < 0 || i >= WithdrawStatus(len(_WithdrawStatus_index)-1) { + return "WithdrawStatus(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _WithdrawStatus_name[_WithdrawStatus_index[i]:_WithdrawStatus_index[i+1]] +} diff --git a/pkg/exchange/binance/convert.go b/pkg/exchange/binance/convert.go index 41330cd61b..41504db4cc 100644 --- a/pkg/exchange/binance/convert.go +++ b/pkg/exchange/binance/convert.go @@ -2,18 +2,129 @@ package binance import ( "fmt" - "strconv" + "strings" "time" "github.com/adshao/go-binance/v2" + "github.com/adshao/go-binance/v2/futures" "github.com/pkg/errors" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" - "github.com/c9s/bbgo/pkg/util" ) +func toGlobalMarket(symbol binance.Symbol) types.Market { + market := types.Market{ + Symbol: symbol.Symbol, + LocalSymbol: symbol.Symbol, + PricePrecision: symbol.QuotePrecision, + VolumePrecision: symbol.BaseAssetPrecision, + QuoteCurrency: symbol.QuoteAsset, + BaseCurrency: symbol.BaseAsset, + } + + if f := symbol.MinNotionalFilter(); f != nil { + market.MinNotional = fixedpoint.MustNewFromString(f.MinNotional) + market.MinAmount = fixedpoint.MustNewFromString(f.MinNotional) + } + + // The LOT_SIZE filter defines the quantity (aka "lots" in auction terms) rules for a symbol. + // There are 3 parts: + // minQty defines the minimum quantity/icebergQty allowed. + // maxQty defines the maximum quantity/icebergQty allowed. + // stepSize defines the intervals that a quantity/icebergQty can be increased/decreased by. + if f := symbol.LotSizeFilter(); f != nil { + market.MinQuantity = fixedpoint.MustNewFromString(f.MinQuantity) + market.MaxQuantity = fixedpoint.MustNewFromString(f.MaxQuantity) + market.StepSize = fixedpoint.MustNewFromString(f.StepSize) + } + + if f := symbol.PriceFilter(); f != nil { + market.MaxPrice = fixedpoint.MustNewFromString(f.MaxPrice) + market.MinPrice = fixedpoint.MustNewFromString(f.MinPrice) + market.TickSize = fixedpoint.MustNewFromString(f.TickSize) + } + + return market +} + +// TODO: Cuz it returns types.Market as well, merge following to the above function +func toGlobalFuturesMarket(symbol futures.Symbol) types.Market { + market := types.Market{ + Symbol: symbol.Symbol, + LocalSymbol: symbol.Symbol, + PricePrecision: symbol.QuotePrecision, + VolumePrecision: symbol.BaseAssetPrecision, + QuoteCurrency: symbol.QuoteAsset, + BaseCurrency: symbol.BaseAsset, + } + + if f := symbol.MinNotionalFilter(); f != nil { + market.MinNotional = fixedpoint.MustNewFromString(f.Notional) + market.MinAmount = fixedpoint.MustNewFromString(f.Notional) + } + + // The LOT_SIZE filter defines the quantity (aka "lots" in auction terms) rules for a symbol. + // There are 3 parts: + // minQty defines the minimum quantity/icebergQty allowed. + // maxQty defines the maximum quantity/icebergQty allowed. + // stepSize defines the intervals that a quantity/icebergQty can be increased/decreased by. 
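// The spot and futures exchange-info symbols above are folded into the same
// types.Market shape, so downstream order checks can stay exchange-agnostic.
// A small sketch of such a check, assuming the fixedpoint.Value Mul/Compare
// helpers used elsewhere in bbgo; the examples package and helper name are
// illustrative only.
package examples

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/fixedpoint"
	"github.com/c9s/bbgo/pkg/types"
)

func validateOrderSize(market types.Market, price, quantity fixedpoint.Value) error {
	// MinQuantity is taken from the LOT_SIZE filter
	if quantity.Compare(market.MinQuantity) < 0 {
		return fmt.Errorf("quantity %v is below the LOT_SIZE minimum %v for %s", quantity, market.MinQuantity, market.Symbol)
	}

	// MinNotional is taken from the MIN_NOTIONAL filter
	notional := price.Mul(quantity)
	if notional.Compare(market.MinNotional) < 0 {
		return fmt.Errorf("notional %v is below the MIN_NOTIONAL minimum %v for %s", notional, market.MinNotional, market.Symbol)
	}
	return nil
}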
+ if f := symbol.LotSizeFilter(); f != nil { + market.MinQuantity = fixedpoint.MustNewFromString(f.MinQuantity) + market.MaxQuantity = fixedpoint.MustNewFromString(f.MaxQuantity) + market.StepSize = fixedpoint.MustNewFromString(f.StepSize) + } + + if f := symbol.PriceFilter(); f != nil { + market.MaxPrice = fixedpoint.MustNewFromString(f.MaxPrice) + market.MinPrice = fixedpoint.MustNewFromString(f.MinPrice) + market.TickSize = fixedpoint.MustNewFromString(f.TickSize) + } + + return market +} + +//func toGlobalIsolatedMarginAccount(account *binance.IsolatedMarginAccount) *types.IsolatedMarginAccount { +// return &types.IsolatedMarginAccount{ +// TotalAssetOfBTC: fixedpoint.MustNewFromString(account.TotalNetAssetOfBTC), +// TotalLiabilityOfBTC: fixedpoint.MustNewFromString(account.TotalLiabilityOfBTC), +// TotalNetAssetOfBTC: fixedpoint.MustNewFromString(account.TotalNetAssetOfBTC), +// Assets: toGlobalIsolatedMarginAssets(account.Assets), +// } +//} + +func toGlobalTicker(stats *binance.PriceChangeStats) (*types.Ticker, error) { + return &types.Ticker{ + Volume: fixedpoint.MustNewFromString(stats.Volume), + Last: fixedpoint.MustNewFromString(stats.LastPrice), + Open: fixedpoint.MustNewFromString(stats.OpenPrice), + High: fixedpoint.MustNewFromString(stats.HighPrice), + Low: fixedpoint.MustNewFromString(stats.LowPrice), + Buy: fixedpoint.MustNewFromString(stats.BidPrice), + Sell: fixedpoint.MustNewFromString(stats.AskPrice), + Time: time.Unix(0, stats.CloseTime*int64(time.Millisecond)), + }, nil +} + +func toGlobalFuturesTicker(stats *futures.PriceChangeStats) (*types.Ticker, error) { + return &types.Ticker{ + Volume: fixedpoint.MustNewFromString(stats.Volume), + Last: fixedpoint.MustNewFromString(stats.LastPrice), + Open: fixedpoint.MustNewFromString(stats.OpenPrice), + High: fixedpoint.MustNewFromString(stats.HighPrice), + Low: fixedpoint.MustNewFromString(stats.LowPrice), + Buy: fixedpoint.MustNewFromString(stats.LastPrice), + Sell: fixedpoint.MustNewFromString(stats.LastPrice), + Time: time.Unix(0, stats.CloseTime*int64(time.Millisecond)), + }, nil +} + func toLocalOrderType(orderType types.OrderType) (binance.OrderType, error) { switch orderType { + + case types.OrderTypeLimitMaker: + return binance.OrderTypeLimitMaker, nil + case types.OrderTypeLimit: return binance.OrderTypeLimit, nil @@ -27,12 +138,12 @@ func toLocalOrderType(orderType types.OrderType) (binance.OrderType, error) { return binance.OrderTypeMarket, nil } - return "", fmt.Errorf("order type %s not supported", orderType) + return "", fmt.Errorf("can not convert to local order, order type %s not supported", orderType) } -func ToGlobalOrders(binanceOrders []*binance.Order) (orders []types.Order, err error) { +func toGlobalOrders(binanceOrders []*binance.Order) (orders []types.Order, err error) { for _, binanceOrder := range binanceOrders { - order, err := ToGlobalOrder(binanceOrder, false) + order, err := toGlobalOrder(binanceOrder, false) if err != nil { return orders, err } @@ -43,24 +154,24 @@ func ToGlobalOrders(binanceOrders []*binance.Order) (orders []types.Order, err e return orders, err } -func ToGlobalOrder(binanceOrder *binance.Order, isMargin bool) (*types.Order, error) { +func toGlobalOrder(binanceOrder *binance.Order, isMargin bool) (*types.Order, error) { return &types.Order{ SubmitOrder: types.SubmitOrder{ ClientOrderID: binanceOrder.ClientOrderID, Symbol: binanceOrder.Symbol, Side: toGlobalSideType(binanceOrder.Side), Type: toGlobalOrderType(binanceOrder.Type), - Quantity: 
util.MustParseFloat(binanceOrder.OrigQuantity), - Price: util.MustParseFloat(binanceOrder.Price), - TimeInForce: string(binanceOrder.TimeInForce), + Quantity: fixedpoint.MustNewFromString(binanceOrder.OrigQuantity), + Price: fixedpoint.MustNewFromString(binanceOrder.Price), + TimeInForce: types.TimeInForce(binanceOrder.TimeInForce), }, - Exchange: types.ExchangeBinance.String(), + Exchange: types.ExchangeBinance, IsWorking: binanceOrder.IsWorking, OrderID: uint64(binanceOrder.OrderID), Status: toGlobalOrderStatus(binanceOrder.Status), - ExecutedQuantity: util.MustParseFloat(binanceOrder.ExecutedQuantity), - CreationTime: millisecondTime(binanceOrder.Time), - UpdateTime: millisecondTime(binanceOrder.UpdateTime), + ExecutedQuantity: fixedpoint.MustNewFromString(binanceOrder.ExecutedQuantity), + CreationTime: types.Time(millisecondTime(binanceOrder.Time)), + UpdateTime: types.Time(millisecondTime(binanceOrder.UpdateTime)), IsMargin: isMargin, IsIsolated: binanceOrder.IsIsolated, }, nil @@ -70,7 +181,7 @@ func millisecondTime(t int64) time.Time { return time.Unix(0, t*int64(time.Millisecond)) } -func ToGlobalTrade(t binance.TradeV3, isMargin bool) (*types.Trade, error) { +func toGlobalTrade(t binance.TradeV3, isMargin bool) (*types.Trade, error) { // skip trade ID that is the same. however this should not happen var side types.SideType if t.IsBuyer { @@ -79,43 +190,45 @@ func ToGlobalTrade(t binance.TradeV3, isMargin bool) (*types.Trade, error) { side = types.SideTypeSell } - price, err := strconv.ParseFloat(t.Price, 64) + price, err := fixedpoint.NewFromString(t.Price) if err != nil { return nil, errors.Wrapf(err, "price parse error, price: %+v", t.Price) } - quantity, err := strconv.ParseFloat(t.Quantity, 64) + quantity, err := fixedpoint.NewFromString(t.Quantity) if err != nil { return nil, errors.Wrapf(err, "quantity parse error, quantity: %+v", t.Quantity) } - var quoteQuantity = 0.0 + var quoteQuantity fixedpoint.Value if len(t.QuoteQuantity) > 0 { - quoteQuantity, err = strconv.ParseFloat(t.QuoteQuantity, 64) + quoteQuantity, err = fixedpoint.NewFromString(t.QuoteQuantity) if err != nil { return nil, errors.Wrapf(err, "quote quantity parse error, quoteQuantity: %+v", t.QuoteQuantity) } + } else { + quoteQuantity = price.Mul(quantity) } - fee, err := strconv.ParseFloat(t.Commission, 64) + fee, err := fixedpoint.NewFromString(t.Commission) if err != nil { return nil, errors.Wrapf(err, "commission parse error, commission: %+v", t.Commission) } return &types.Trade{ - ID: t.ID, + ID: uint64(t.ID), OrderID: uint64(t.OrderID), Price: price, Symbol: t.Symbol, Exchange: "binance", Quantity: quantity, + QuoteQuantity: quoteQuantity, Side: side, IsBuyer: t.IsBuyer, IsMaker: t.IsMaker, Fee: fee, FeeCurrency: t.CommissionAsset, - QuoteQuantity: quoteQuantity, - Time: millisecondTime(t.Time), + Time: types.Time(millisecondTime(t.Time)), IsMargin: isMargin, IsIsolated: t.IsIsolated, }, nil @@ -130,7 +243,7 @@ func toGlobalSideType(side binance.SideType) types.SideType { return types.SideTypeSell default: - log.Errorf("unknown side type: %v", side) + log.Errorf("can not convert binance side type, unknown side type: %q", side) return "" } } @@ -178,16 +291,42 @@ func toGlobalOrderStatus(orderStatus binance.OrderStatusType) types.OrderStatus return types.OrderStatus(orderStatus) } -// ConvertTrades converts the binance v3 trade into the global trade type -func ConvertTrades(remoteTrades []*binance.TradeV3) (trades []types.Trade, err error) { - for _, t := range remoteTrades { - trade, err := 
ToGlobalTrade(*t, false) - if err != nil { - return nil, errors.Wrapf(err, "binance v3 trade parse error, trade: %+v", *t) +func convertSubscription(s types.Subscription) string { + // binance uses lower case symbol name, + // for kline, it's "@kline_" + // for depth, it's "@depth OR @depth@100ms" + switch s.Channel { + case types.KLineChannel: + return fmt.Sprintf("%s@%s_%s", strings.ToLower(s.Symbol), s.Channel, s.Options.String()) + case types.BookChannel: + // depth values: 5, 10, 20 + // Stream Names: @depth OR @depth@100ms. + // Update speed: 1000ms or 100ms + n := strings.ToLower(s.Symbol) + "@depth" + switch s.Options.Depth { + case types.DepthLevel5: + n += "5" + + case types.DepthLevelMedium: + n += "20" + + case types.DepthLevelFull: + default: + } - trades = append(trades, *trade) + switch s.Options.Speed { + case types.SpeedHigh: + n += "@100ms" + + case types.SpeedLow: + n += "@1000ms" + + } + return n + case types.BookTickerChannel: + return fmt.Sprintf("%s@bookTicker", strings.ToLower(s.Symbol)) } - return trades, err + return fmt.Sprintf("%s@%s", strings.ToLower(s.Symbol), s.Channel) } diff --git a/pkg/exchange/binance/convert_futures.go b/pkg/exchange/binance/convert_futures.go new file mode 100644 index 0000000000..4a26f248c2 --- /dev/null +++ b/pkg/exchange/binance/convert_futures.go @@ -0,0 +1,279 @@ +package binance + +import ( + "fmt" + "time" + + "github.com/adshao/go-binance/v2/futures" + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func toGlobalFuturesAccountInfo(account *futures.Account) *types.FuturesAccountInfo { + return &types.FuturesAccountInfo{ + Assets: toGlobalFuturesUserAssets(account.Assets), + Positions: toGlobalFuturesPositions(account.Positions), + TotalInitialMargin: fixedpoint.MustNewFromString(account.TotalInitialMargin), + TotalMaintMargin: fixedpoint.MustNewFromString(account.TotalMaintMargin), + TotalMarginBalance: fixedpoint.MustNewFromString(account.TotalMarginBalance), + TotalOpenOrderInitialMargin: fixedpoint.MustNewFromString(account.TotalOpenOrderInitialMargin), + TotalPositionInitialMargin: fixedpoint.MustNewFromString(account.TotalPositionInitialMargin), + TotalUnrealizedProfit: fixedpoint.MustNewFromString(account.TotalUnrealizedProfit), + TotalWalletBalance: fixedpoint.MustNewFromString(account.TotalWalletBalance), + UpdateTime: account.UpdateTime, + } +} + +func toGlobalFuturesBalance(balances []*futures.Balance) types.BalanceMap { + retBalances := make(types.BalanceMap) + for _, balance := range balances { + retBalances[balance.Asset] = types.Balance{ + Currency: balance.Asset, + Available: fixedpoint.MustNewFromString(balance.AvailableBalance), + } + } + return retBalances +} + +func toGlobalFuturesPositions(futuresPositions []*futures.AccountPosition) types.FuturesPositionMap { + retFuturesPositions := make(types.FuturesPositionMap) + for _, futuresPosition := range futuresPositions { + retFuturesPositions[futuresPosition.Symbol] = types.FuturesPosition{ // TODO: types.FuturesPosition + Isolated: futuresPosition.Isolated, + PositionRisk: &types.PositionRisk{ + Leverage: fixedpoint.MustNewFromString(futuresPosition.Leverage), + }, + Symbol: futuresPosition.Symbol, + UpdateTime: futuresPosition.UpdateTime, + } + } + + return retFuturesPositions +} + +func toGlobalFuturesUserAssets(assets []*futures.AccountAsset) (retAssets types.FuturesAssetMap) { + retFuturesAssets := make(types.FuturesAssetMap) + for _, futuresAsset := range assets { + retFuturesAssets[futuresAsset.Asset] 
= types.FuturesUserAsset{ + Asset: futuresAsset.Asset, + InitialMargin: fixedpoint.MustNewFromString(futuresAsset.InitialMargin), + MaintMargin: fixedpoint.MustNewFromString(futuresAsset.MaintMargin), + MarginBalance: fixedpoint.MustNewFromString(futuresAsset.MarginBalance), + MaxWithdrawAmount: fixedpoint.MustNewFromString(futuresAsset.MaxWithdrawAmount), + OpenOrderInitialMargin: fixedpoint.MustNewFromString(futuresAsset.OpenOrderInitialMargin), + PositionInitialMargin: fixedpoint.MustNewFromString(futuresAsset.PositionInitialMargin), + UnrealizedProfit: fixedpoint.MustNewFromString(futuresAsset.UnrealizedProfit), + WalletBalance: fixedpoint.MustNewFromString(futuresAsset.WalletBalance), + } + } + + return retFuturesAssets +} + +func toLocalFuturesOrderType(orderType types.OrderType) (futures.OrderType, error) { + switch orderType { + + // case types.OrderTypeLimitMaker: + // return futures.OrderTypeLimitMaker, nil //TODO + + case types.OrderTypeLimit, types.OrderTypeLimitMaker: + return futures.OrderTypeLimit, nil + + // case types.OrderTypeStopLimit: + // return futures.OrderTypeStopLossLimit, nil //TODO + + // case types.OrderTypeStopMarket: + // return futures.OrderTypeStopLoss, nil //TODO + + case types.OrderTypeMarket: + return futures.OrderTypeMarket, nil + } + + return "", fmt.Errorf("can not convert to local order, order type %s not supported", orderType) +} + +func toGlobalFuturesOrders(futuresOrders []*futures.Order) (orders []types.Order, err error) { + for _, futuresOrder := range futuresOrders { + order, err := toGlobalFuturesOrder(futuresOrder, false) + if err != nil { + return orders, err + } + + orders = append(orders, *order) + } + + return orders, err +} + +func toGlobalFuturesOrder(futuresOrder *futures.Order, isMargin bool) (*types.Order, error) { + return &types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: futuresOrder.ClientOrderID, + Symbol: futuresOrder.Symbol, + Side: toGlobalFuturesSideType(futuresOrder.Side), + Type: toGlobalFuturesOrderType(futuresOrder.Type), + ReduceOnly: futuresOrder.ReduceOnly, + ClosePosition: futuresOrder.ClosePosition, + Quantity: fixedpoint.MustNewFromString(futuresOrder.OrigQuantity), + Price: fixedpoint.MustNewFromString(futuresOrder.Price), + TimeInForce: types.TimeInForce(futuresOrder.TimeInForce), + }, + Exchange: types.ExchangeBinance, + OrderID: uint64(futuresOrder.OrderID), + Status: toGlobalFuturesOrderStatus(futuresOrder.Status), + ExecutedQuantity: fixedpoint.MustNewFromString(futuresOrder.ExecutedQuantity), + CreationTime: types.Time(millisecondTime(futuresOrder.Time)), + UpdateTime: types.Time(millisecondTime(futuresOrder.UpdateTime)), + IsMargin: isMargin, + }, nil +} + +func toGlobalFuturesTrade(t futures.AccountTrade) (*types.Trade, error) { + // skip trade ID that is the same. 
however this should not happen + var side types.SideType + if t.Buyer { + side = types.SideTypeBuy + } else { + side = types.SideTypeSell + } + + price, err := fixedpoint.NewFromString(t.Price) + if err != nil { + return nil, errors.Wrapf(err, "price parse error, price: %+v", t.Price) + } + + quantity, err := fixedpoint.NewFromString(t.Quantity) + if err != nil { + return nil, errors.Wrapf(err, "quantity parse error, quantity: %+v", t.Quantity) + } + + var quoteQuantity fixedpoint.Value + if len(t.QuoteQuantity) > 0 { + quoteQuantity, err = fixedpoint.NewFromString(t.QuoteQuantity) + if err != nil { + return nil, errors.Wrapf(err, "quote quantity parse error, quoteQuantity: %+v", t.QuoteQuantity) + } + } else { + quoteQuantity = price.Mul(quantity) + } + + fee, err := fixedpoint.NewFromString(t.Commission) + if err != nil { + return nil, errors.Wrapf(err, "commission parse error, commission: %+v", t.Commission) + } + + return &types.Trade{ + ID: uint64(t.ID), + OrderID: uint64(t.OrderID), + Price: price, + Symbol: t.Symbol, + Exchange: "binance", + Quantity: quantity, + QuoteQuantity: quoteQuantity, + Side: side, + IsBuyer: t.Buyer, + IsMaker: t.Maker, + Fee: fee, + FeeCurrency: t.CommissionAsset, + Time: types.Time(millisecondTime(t.Time)), + IsFutures: true, + }, nil +} + +func toGlobalFuturesSideType(side futures.SideType) types.SideType { + switch side { + case futures.SideTypeBuy: + return types.SideTypeBuy + + case futures.SideTypeSell: + return types.SideTypeSell + + default: + log.Errorf("can not convert futures side type, unknown side type: %q", side) + return "" + } +} + +func toGlobalFuturesOrderType(orderType futures.OrderType) types.OrderType { + switch orderType { + // TODO + case futures.OrderTypeLimit: // , futures.OrderTypeLimitMaker, futures.OrderTypeTakeProfitLimit: + return types.OrderTypeLimit + + case futures.OrderTypeMarket: + return types.OrderTypeMarket + // TODO + // case futures.OrderTypeStopLossLimit: + // return types.OrderTypeStopLimit + // TODO + // case futures.OrderTypeStopLoss: + // return types.OrderTypeStopMarket + + default: + log.Errorf("unsupported order type: %v", orderType) + return "" + } +} + +func toGlobalFuturesOrderStatus(orderStatus futures.OrderStatusType) types.OrderStatus { + switch orderStatus { + case futures.OrderStatusTypeNew: + return types.OrderStatusNew + + case futures.OrderStatusTypeRejected: + return types.OrderStatusRejected + + case futures.OrderStatusTypeCanceled: + return types.OrderStatusCanceled + + case futures.OrderStatusTypePartiallyFilled: + return types.OrderStatusPartiallyFilled + + case futures.OrderStatusTypeFilled: + return types.OrderStatusFilled + } + + return types.OrderStatus(orderStatus) +} + +func convertPremiumIndex(index *futures.PremiumIndex) (*types.PremiumIndex, error) { + markPrice, err := fixedpoint.NewFromString(index.MarkPrice) + if err != nil { + return nil, err + } + + lastFundingRate, err := fixedpoint.NewFromString(index.LastFundingRate) + if err != nil { + return nil, err + } + + nextFundingTime := time.Unix(0, index.NextFundingTime*int64(time.Millisecond)) + t := time.Unix(0, index.Time*int64(time.Millisecond)) + + return &types.PremiumIndex{ + Symbol: index.Symbol, + MarkPrice: markPrice, + NextFundingTime: nextFundingTime, + LastFundingRate: lastFundingRate, + Time: t, + }, nil +} + +func convertPositionRisk(risk *futures.PositionRisk) (*types.PositionRisk, error) { + leverage, err := fixedpoint.NewFromString(risk.Leverage) + if err != nil { + return nil, err + } + + liquidationPrice, err := 
fixedpoint.NewFromString(risk.LiquidationPrice) + if err != nil { + return nil, err + } + + return &types.PositionRisk{ + Leverage: leverage, + LiquidationPrice: liquidationPrice, + }, nil +} diff --git a/pkg/exchange/binance/convert_margin.go b/pkg/exchange/binance/convert_margin.go new file mode 100644 index 0000000000..e04bad07e1 --- /dev/null +++ b/pkg/exchange/binance/convert_margin.go @@ -0,0 +1,137 @@ +package binance + +import ( + "github.com/adshao/go-binance/v2" + + "github.com/c9s/bbgo/pkg/exchange/binance/binanceapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func toGlobalLoan(record binanceapi.MarginLoanRecord) types.MarginLoan { + return types.MarginLoan{ + Exchange: types.ExchangeBinance, + TransactionID: uint64(record.TxId), + Asset: record.Asset, + Principle: record.Principal, + Time: types.Time(record.Timestamp), + IsolatedSymbol: record.IsolatedSymbol, + } +} + +func toGlobalRepay(record binanceapi.MarginRepayRecord) types.MarginRepay { + return types.MarginRepay{ + Exchange: types.ExchangeBinance, + TransactionID: record.TxId, + Asset: record.Asset, + Principle: record.Principal, + Time: types.Time(record.Timestamp), + IsolatedSymbol: record.IsolatedSymbol, + } +} + +func toGlobalInterest(record binanceapi.MarginInterest) types.MarginInterest { + return types.MarginInterest{ + Exchange: types.ExchangeBinance, + Asset: record.Asset, + Principle: record.Principal, + Interest: record.Interest, + InterestRate: record.InterestRate, + IsolatedSymbol: record.IsolatedSymbol, + Time: types.Time(record.InterestAccuredTime), + } +} + +func toGlobalLiquidation(record binanceapi.MarginLiquidationRecord) types.MarginLiquidation { + return types.MarginLiquidation{ + Exchange: types.ExchangeBinance, + AveragePrice: record.AveragePrice, + ExecutedQuantity: record.ExecutedQuantity, + OrderID: record.OrderId, + Price: record.Price, + Quantity: record.Quantity, + Side: toGlobalSideType(record.Side), + Symbol: record.Symbol, + TimeInForce: types.TimeInForce(record.TimeInForce), + IsIsolated: record.IsIsolated, + UpdatedTime: types.Time(record.UpdatedTime), + } +} + +func toGlobalIsolatedUserAsset(userAsset binance.IsolatedUserAsset) types.IsolatedUserAsset { + return types.IsolatedUserAsset{ + Asset: userAsset.Asset, + Borrowed: fixedpoint.MustNewFromString(userAsset.Borrowed), + Free: fixedpoint.MustNewFromString(userAsset.Free), + Interest: fixedpoint.MustNewFromString(userAsset.Interest), + Locked: fixedpoint.MustNewFromString(userAsset.Locked), + NetAsset: fixedpoint.MustNewFromString(userAsset.NetAsset), + NetAssetOfBtc: fixedpoint.MustNewFromString(userAsset.NetAssetOfBtc), + BorrowEnabled: userAsset.BorrowEnabled, + RepayEnabled: userAsset.RepayEnabled, + TotalAsset: fixedpoint.MustNewFromString(userAsset.TotalAsset), + } +} + +func toGlobalIsolatedMarginAsset(asset binance.IsolatedMarginAsset) types.IsolatedMarginAsset { + return types.IsolatedMarginAsset{ + Symbol: asset.Symbol, + QuoteAsset: toGlobalIsolatedUserAsset(asset.QuoteAsset), + BaseAsset: toGlobalIsolatedUserAsset(asset.BaseAsset), + IsolatedCreated: asset.IsolatedCreated, + MarginLevel: fixedpoint.MustNewFromString(asset.MarginLevel), + MarginLevelStatus: asset.MarginLevelStatus, + MarginRatio: fixedpoint.MustNewFromString(asset.MarginRatio), + IndexPrice: fixedpoint.MustNewFromString(asset.IndexPrice), + LiquidatePrice: fixedpoint.MustNewFromString(asset.LiquidatePrice), + LiquidateRate: fixedpoint.MustNewFromString(asset.LiquidateRate), + TradeEnabled: false, + } +} + +func 
toGlobalIsolatedMarginAssets(assets []binance.IsolatedMarginAsset) (retAssets types.IsolatedMarginAssetMap) { + retMarginAssets := make(types.IsolatedMarginAssetMap) + for _, marginAsset := range assets { + retMarginAssets[marginAsset.Symbol] = toGlobalIsolatedMarginAsset(marginAsset) + } + + return retMarginAssets +} + +func toGlobalMarginUserAssets(assets []binance.UserAsset) types.MarginAssetMap { + retMarginAssets := make(types.MarginAssetMap) + for _, marginAsset := range assets { + retMarginAssets[marginAsset.Asset] = types.MarginUserAsset{ + Asset: marginAsset.Asset, + Borrowed: fixedpoint.MustNewFromString(marginAsset.Borrowed), + Free: fixedpoint.MustNewFromString(marginAsset.Free), + Interest: fixedpoint.MustNewFromString(marginAsset.Interest), + Locked: fixedpoint.MustNewFromString(marginAsset.Locked), + NetAsset: fixedpoint.MustNewFromString(marginAsset.NetAsset), + } + } + + return retMarginAssets +} + +func toGlobalMarginAccountInfo(account *binance.MarginAccount) *types.MarginAccountInfo { + return &types.MarginAccountInfo{ + BorrowEnabled: account.BorrowEnabled, + MarginLevel: fixedpoint.MustNewFromString(account.MarginLevel), + TotalAssetOfBTC: fixedpoint.MustNewFromString(account.TotalAssetOfBTC), + TotalLiabilityOfBTC: fixedpoint.MustNewFromString(account.TotalLiabilityOfBTC), + TotalNetAssetOfBTC: fixedpoint.MustNewFromString(account.TotalNetAssetOfBTC), + TradeEnabled: account.TradeEnabled, + TransferEnabled: account.TransferEnabled, + Assets: toGlobalMarginUserAssets(account.UserAssets), + } +} + +func toGlobalIsolatedMarginAccountInfo(account *binance.IsolatedMarginAccount) *types.IsolatedMarginAccountInfo { + return &types.IsolatedMarginAccountInfo{ + TotalAssetOfBTC: fixedpoint.MustNewFromString(account.TotalAssetOfBTC), + TotalLiabilityOfBTC: fixedpoint.MustNewFromString(account.TotalLiabilityOfBTC), + TotalNetAssetOfBTC: fixedpoint.MustNewFromString(account.TotalNetAssetOfBTC), + Assets: toGlobalIsolatedMarginAssets(account.Assets), + } +} diff --git a/pkg/exchange/binance/depthframe.go b/pkg/exchange/binance/depthframe.go deleted file mode 100644 index c4f22cf751..0000000000 --- a/pkg/exchange/binance/depthframe.go +++ /dev/null @@ -1,160 +0,0 @@ -package binance - -import ( - "context" - "math/rand" - "sync" - "time" - - "github.com/adshao/go-binance/v2" -) - -//go:generate callbackgen -type DepthFrame -type DepthFrame struct { - client *binance.Client - context context.Context - - mu sync.Mutex - once sync.Once - SnapshotDepth *DepthEvent - Symbol string - BufEvents []DepthEvent - - readyCallbacks []func(snapshotDepth DepthEvent, bufEvents []DepthEvent) - pushCallbacks []func(e DepthEvent) -} - -func (f *DepthFrame) Reset() { - f.mu.Lock() - f.SnapshotDepth = nil - f.BufEvents = nil - f.mu.Unlock() -} - -func (f *DepthFrame) loadDepthSnapshot() { - if debugBinanceDepth { - log.Infof("loading %s depth from the restful api", f.Symbol) - } - - depth, err := f.fetch(f.context) - if err != nil { - return - } - - f.mu.Lock() - - // filter the events by the event IDs - var events []DepthEvent - for _, e := range f.BufEvents { - if e.FirstUpdateID <= depth.FinalUpdateID || e.FinalUpdateID <= depth.FinalUpdateID { - continue - } - - events = append(events, e) - } - - // since we're buffering the update events, ideally the some of the head events - // should be older than the received depth snapshot. - // if the head event is newer than the depth we got, - // then there are something missed, we need to restart the process. 
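// Illustrative sketch of the update-ID bookkeeping described in the comments
// above (the removed DepthFrame code): buffered depth events are reconciled
// against a REST snapshot by comparing their update-ID ranges. The types and
// names here are simplified stand-ins, not the package's own API.
package main

import "fmt"

type depthUpdate struct {
	FirstUpdateID, FinalUpdateID int64
}

// reconcile drops buffered events that are already covered by the snapshot
// and reports whether a gap was detected, in which case the caller should
// reload the snapshot and start over.
func reconcile(snapshotFinalID int64, buffered []depthUpdate) (pending []depthUpdate, gap bool) {
	for _, e := range buffered {
		if e.FinalUpdateID <= snapshotFinalID {
			continue // already included in the snapshot
		}
		pending = append(pending, e)
	}
	if len(pending) > 0 && pending[0].FirstUpdateID > snapshotFinalID+1 {
		// the earliest pending event starts after the snapshot ends:
		// some updates were missed, so a resync is required
		return nil, true
	}
	return pending, false
}

func main() {
	pending, gap := reconcile(100, []depthUpdate{{90, 100}, {101, 105}, {106, 110}})
	fmt.Println(len(pending), gap) // 2 false
}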
- if len(events) > 0 { - e := events[0] - if e.FirstUpdateID > depth.FinalUpdateID+1 { - log.Warn("miss matched final update id for order book") - f.SnapshotDepth = nil - f.BufEvents = nil - f.mu.Unlock() - return - } - } - - f.SnapshotDepth = depth - f.BufEvents = nil - f.mu.Unlock() - - f.EmitReady(*depth, events) -} - -func (f *DepthFrame) PushEvent(e DepthEvent) { - f.mu.Lock() - - // before the snapshot is loaded, we need to buffer the events until we loaded the snapshot. - if f.SnapshotDepth == nil { - // buffer the events until we loaded the snapshot - f.BufEvents = append(f.BufEvents, e) - f.mu.Unlock() - - f.loadDepthSnapshot() - - // start a worker to update the snapshot periodically. - go f.once.Do(func() { - if debugBinanceDepth { - log.Infof("starting depth snapshot updater for %s market", f.Symbol) - } - - ticker := time.NewTicker(1*time.Minute + time.Duration(rand.Intn(10))*time.Millisecond) - defer ticker.Stop() - for { - select { - case <-f.context.Done(): - return - - case <-ticker.C: - f.loadDepthSnapshot() - } - } - }) - } else { - // if we have the snapshot, we could use that final update ID filter the events - - // drop any update ID < the final update ID - if e.FinalUpdateID < f.SnapshotDepth.FinalUpdateID { - f.mu.Unlock() - return - } - - // if the first update ID > final update ID + 1, it means something is missing, we need to reload. - if e.FirstUpdateID > f.SnapshotDepth.FinalUpdateID+1 { - if debugBinanceDepth { - log.Warnf("event first update id %d > final update id + 1 (%d), resetting snapshot", e.FirstUpdateID, f.SnapshotDepth.FirstUpdateID+1) - } - - f.SnapshotDepth = nil - f.mu.Unlock() - return - } - - // update the final update ID, so that we can check the next event - f.SnapshotDepth.FinalUpdateID = e.FinalUpdateID - f.mu.Unlock() - - f.EmitPush(e) - } -} - -// fetch fetches the depth and convert to the depth event so that we can reuse the event structure to convert it to the global orderbook type -func (f *DepthFrame) fetch(ctx context.Context) (*DepthEvent, error) { - if debugBinanceDepth { - log.Infof("fetching %s depth snapshot", f.Symbol) - } - - response, err := f.client.NewDepthService().Symbol(f.Symbol).Do(ctx) - if err != nil { - return nil, err - } - - event := DepthEvent{ - FirstUpdateID: 0, - FinalUpdateID: response.LastUpdateID, - } - - for _, entry := range response.Bids { - event.Bids = append(event.Bids, DepthEntry{PriceLevel: entry.Price, Quantity: entry.Quantity}) - } - - for _, entry := range response.Asks { - event.Asks = append(event.Asks, DepthEntry{PriceLevel: entry.Price, Quantity: entry.Quantity}) - } - - return &event, nil -} diff --git a/pkg/exchange/binance/depthframe_callbacks.go b/pkg/exchange/binance/depthframe_callbacks.go deleted file mode 100644 index df45c16b4c..0000000000 --- a/pkg/exchange/binance/depthframe_callbacks.go +++ /dev/null @@ -1,25 +0,0 @@ -// Code generated by "callbackgen -type DepthFrame"; DO NOT EDIT. 
- -package binance - -import () - -func (f *DepthFrame) OnReady(cb func(snapshotDepth DepthEvent, bufEvents []DepthEvent)) { - f.readyCallbacks = append(f.readyCallbacks, cb) -} - -func (f *DepthFrame) EmitReady(snapshotDepth DepthEvent, bufEvents []DepthEvent) { - for _, cb := range f.readyCallbacks { - cb(snapshotDepth, bufEvents) - } -} - -func (f *DepthFrame) OnPush(cb func(e DepthEvent)) { - f.pushCallbacks = append(f.pushCallbacks, cb) -} - -func (f *DepthFrame) EmitPush(e DepthEvent) { - for _, cb := range f.pushCallbacks { - cb(e) - } -} diff --git a/pkg/exchange/binance/exchange.go b/pkg/exchange/binance/exchange.go index 6e662138d9..9dc464e653 100644 --- a/pkg/exchange/binance/exchange.go +++ b/pkg/exchange/binance/exchange.go @@ -5,19 +5,42 @@ import ( "fmt" "os" "strconv" + "strings" + "sync" "time" + "github.com/adshao/go-binance/v2/futures" + "github.com/spf13/viper" + + "go.uber.org/multierr" + + "golang.org/x/time/rate" + "github.com/adshao/go-binance/v2" "github.com/google/uuid" "github.com/pkg/errors" - "github.com/sirupsen/logrus" + "github.com/c9s/bbgo/pkg/exchange/binance/binanceapi" "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" "github.com/c9s/bbgo/pkg/util" ) +const BNB = "BNB" + +const BinanceUSBaseURL = "https://api.binance.us" +const BinanceTestBaseURL = "https://testnet.binance.vision" +const BinanceUSWebSocketURL = "wss://stream.binance.us:9443" +const WebSocketURL = "wss://stream.binance.com:9443" +const WebSocketTestURL = "wss://testnet.binance.vision" +const FutureTestBaseURL = "https://testnet.binancefuture.com" +const FuturesWebSocketURL = "wss://fstream.binance.com" +const FuturesWebSocketTestURL = "wss://stream.binancefuture.com" + +// 5 per second and a 2 initial bucket +var orderLimiter = rate.NewLimiter(5, 2) + var log = logrus.WithFields(logrus.Fields{ "exchange": "binance", }) @@ -25,22 +48,86 @@ var log = logrus.WithFields(logrus.Fields{ func init() { _ = types.Exchange(&Exchange{}) _ = types.MarginExchange(&Exchange{}) + _ = types.FuturesExchange(&Exchange{}) + // FIXME: this is not effected since dotenv is loaded in the rootCmd, not in the init function if ok, _ := strconv.ParseBool(os.Getenv("DEBUG_BINANCE_STREAM")); ok { log.Level = logrus.DebugLevel } } +func isBinanceUs() bool { + v, err := strconv.ParseBool(os.Getenv("BINANCE_US")) + return err == nil && v +} + +func paperTrade() bool { + v, ok := util.GetEnvVarBool("PAPER_TRADE") + return ok && v +} + type Exchange struct { types.MarginSettings + types.FuturesSettings + + key, secret string + // client is used for spot & margin + client *binance.Client + + // futuresClient is used for usdt-m futures + futuresClient *futures.Client // USDT-M Futures + // deliveryClient *delivery.Client // Coin-M Futures - Client *binance.Client + // client2 is a newer version of the binance api client implemented by ourselves. 
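// Illustrative sketch, not part of the surrounding change: how a
// golang.org/x/time/rate limiter like the orderLimiter declared above is
// typically used. Wait blocks until a token is available (here 5 tokens per
// second with a burst of 2), so bursts of order requests are smoothed out
// before they reach the exchange API.
package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

func main() {
	limiter := rate.NewLimiter(5, 2) // 5 requests per second, burst of 2
	ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
	defer cancel()

	for i := 0; i < 6; i++ {
		if err := limiter.Wait(ctx); err != nil {
			// context cancelled or deadline exceeded while waiting for a token
			fmt.Println("rate limiter wait error:", err)
			return
		}
		fmt.Println("request", i, "allowed at", time.Now().Format("15:04:05.000"))
	}
}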
+ client2 *binanceapi.RestClient } +var timeSetter sync.Once + func New(key, secret string) *Exchange { var client = binance.NewClient(key, secret) + client.HTTPClient = binanceapi.DefaultHttpClient + client.Debug = viper.GetBool("debug-binance-client") + + var futuresClient = binance.NewFuturesClient(key, secret) + futuresClient.HTTPClient = binanceapi.DefaultHttpClient + futuresClient.Debug = viper.GetBool("debug-binance-futures-client") + + if isBinanceUs() { + client.BaseURL = BinanceUSBaseURL + } + + if paperTrade() { + client.BaseURL = BinanceTestBaseURL + futuresClient.BaseURL = FutureTestBaseURL + } + + client2 := binanceapi.NewClient(client.BaseURL) + + var err error + if len(key) > 0 && len(secret) > 0 { + client2.Auth(key, secret) + + timeSetter.Do(func() { + _, err = client.NewSetServerTimeService().Do(context.Background()) + if err != nil { + log.WithError(err).Error("can not set server time") + } + + _, err = futuresClient.NewSetServerTimeService().Do(context.Background()) + if err != nil { + log.WithError(err).Error("can not set server time") + } + }) + } + return &Exchange{ - Client: client, + key: key, + // pragma: allowlist nextline secret + secret: secret, + client: client, + futuresClient: futuresClient, + client2: client2, } } @@ -48,210 +135,446 @@ func (e *Exchange) Name() types.ExchangeName { return types.ExchangeBinance } -func (e *Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { - log.Info("querying market info...") +func (e *Exchange) QueryTicker(ctx context.Context, symbol string) (*types.Ticker, error) { + if e.IsFutures { + req := e.futuresClient.NewListPriceChangeStatsService() + req.Symbol(strings.ToUpper(symbol)) + stats, err := req.Do(ctx) + if err != nil { + return nil, err + } - exchangeInfo, err := e.Client.NewExchangeInfoService().Do(ctx) + return toGlobalFuturesTicker(stats[0]) + } + req := e.client.NewListPriceChangeStatsService() + req.Symbol(strings.ToUpper(symbol)) + stats, err := req.Do(ctx) if err != nil { return nil, err } - markets := types.MarketMap{} - for _, symbol := range exchangeInfo.Symbols { - market := types.Market{ - Symbol: symbol.Symbol, - PricePrecision: symbol.QuotePrecision, - VolumePrecision: symbol.BaseAssetPrecision, - QuoteCurrency: symbol.QuoteAsset, - BaseCurrency: symbol.BaseAsset, + return toGlobalTicker(stats[0]) +} + +func (e *Exchange) QueryTickers(ctx context.Context, symbol ...string) (map[string]types.Ticker, error) { + var tickers = make(map[string]types.Ticker) + + if len(symbol) == 1 { + ticker, err := e.QueryTicker(ctx, symbol[0]) + if err != nil { + return nil, err } - if f := symbol.MinNotionalFilter(); f != nil { - market.MinNotional = util.MustParseFloat(f.MinNotional) - market.MinAmount = util.MustParseFloat(f.MinNotional) + tickers[strings.ToUpper(symbol[0])] = *ticker + return tickers, nil + } + + m := make(map[string]struct{}) + exists := struct{}{} + + for _, s := range symbol { + m[s] = exists + } + + if e.IsFutures { + var req = e.futuresClient.NewListPriceChangeStatsService() + changeStats, err := req.Do(ctx) + if err != nil { + return nil, err } + for _, stats := range changeStats { + if _, ok := m[stats.Symbol]; len(symbol) != 0 && !ok { + continue + } + + tick := types.Ticker{ + Volume: fixedpoint.MustNewFromString(stats.Volume), + Last: fixedpoint.MustNewFromString(stats.LastPrice), + Open: fixedpoint.MustNewFromString(stats.OpenPrice), + High: fixedpoint.MustNewFromString(stats.HighPrice), + Low: fixedpoint.MustNewFromString(stats.LowPrice), + Buy: 
fixedpoint.MustNewFromString(stats.LastPrice), + Sell: fixedpoint.MustNewFromString(stats.LastPrice), + Time: time.Unix(0, stats.CloseTime*int64(time.Millisecond)), + } - // The LOT_SIZE filter defines the quantity (aka "lots" in auction terms) rules for a symbol. - // There are 3 parts: - // minQty defines the minimum quantity/icebergQty allowed. - // maxQty defines the maximum quantity/icebergQty allowed. - // stepSize defines the intervals that a quantity/icebergQty can be increased/decreased by. - if f := symbol.LotSizeFilter(); f != nil { - market.MinLot = util.MustParseFloat(f.MinQuantity) - market.MinQuantity = util.MustParseFloat(f.MinQuantity) - market.MaxQuantity = util.MustParseFloat(f.MaxQuantity) - // market.StepSize = util.MustParseFloat(f.StepSize) + tickers[stats.Symbol] = tick } - if f := symbol.PriceFilter(); f != nil { - market.MaxPrice = util.MustParseFloat(f.MaxPrice) - market.MinPrice = util.MustParseFloat(f.MinPrice) - market.TickSize = util.MustParseFloat(f.TickSize) + return tickers, nil + } + + var req = e.client.NewListPriceChangeStatsService() + changeStats, err := req.Do(ctx) + if err != nil { + return nil, err + } + + for _, stats := range changeStats { + if _, ok := m[stats.Symbol]; len(symbol) != 0 && !ok { + continue + } + + tick := types.Ticker{ + Volume: fixedpoint.MustNewFromString(stats.Volume), + Last: fixedpoint.MustNewFromString(stats.LastPrice), + Open: fixedpoint.MustNewFromString(stats.OpenPrice), + High: fixedpoint.MustNewFromString(stats.HighPrice), + Low: fixedpoint.MustNewFromString(stats.LowPrice), + Buy: fixedpoint.MustNewFromString(stats.BidPrice), + Sell: fixedpoint.MustNewFromString(stats.AskPrice), + Time: time.Unix(0, stats.CloseTime*int64(time.Millisecond)), } - markets[symbol.Symbol] = market + tickers[stats.Symbol] = tick + } + + return tickers, nil +} + +func (e *Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { + + if e.IsFutures { + exchangeInfo, err := e.futuresClient.NewExchangeInfoService().Do(ctx) + if err != nil { + return nil, err + } + + markets := types.MarketMap{} + for _, symbol := range exchangeInfo.Symbols { + markets[symbol.Symbol] = toGlobalFuturesMarket(symbol) + } + + return markets, nil + } + + exchangeInfo, err := e.client.NewExchangeInfoService().Do(ctx) + if err != nil { + return nil, err + } + + markets := types.MarketMap{} + for _, symbol := range exchangeInfo.Symbols { + markets[symbol.Symbol] = toGlobalMarket(symbol) } return markets, nil } -func (e *Exchange) QueryAveragePrice(ctx context.Context, symbol string) (float64, error) { - resp, err := e.Client.NewAveragePriceService().Symbol(symbol).Do(ctx) +func (e *Exchange) QueryAveragePrice(ctx context.Context, symbol string) (fixedpoint.Value, error) { + resp, err := e.client.NewAveragePriceService().Symbol(symbol).Do(ctx) if err != nil { - return 0, err + return fixedpoint.Zero, err } - return util.MustParseFloat(resp.Price), nil + return fixedpoint.MustNewFromString(resp.Price), nil } func (e *Exchange) NewStream() types.Stream { - stream := NewStream(e.Client) + stream := NewStream(e, e.client, e.futuresClient) stream.MarginSettings = e.MarginSettings + stream.FuturesSettings = e.FuturesSettings return stream } -func (e *Exchange) QueryMarginAccount(ctx context.Context) (*binance.MarginAccount, error) { - return e.Client.NewGetMarginAccountService().Do(ctx) +func (e *Exchange) QueryMarginAssetMaxBorrowable(ctx context.Context, asset string) (amount fixedpoint.Value, err error) { + req := e.client.NewGetMaxBorrowableService() + 
req.Asset(asset) + if e.IsIsolatedMargin { + req.IsolatedSymbol(e.IsolatedMarginSymbol) + } + resp, err := req.Do(ctx) + if err != nil { + return fixedpoint.Zero, err + } + + return fixedpoint.NewFromString(resp.Amount) } -func (e *Exchange) QueryIsolatedMarginAccount(ctx context.Context, symbols ...string) (*binance.IsolatedMarginAccount, error) { - req := e.Client.NewGetIsolatedMarginAccountService() - if len(symbols) > 0 { - req.Symbols(symbols...) +func (e *Exchange) RepayMarginAsset(ctx context.Context, asset string, amount fixedpoint.Value) error { + req := e.client.NewMarginRepayService() + req.Asset(asset) + req.Amount(amount.String()) + if e.IsIsolatedMargin { + req.IsolatedSymbol(e.IsolatedMarginSymbol) + } + + log.Infof("repaying margin asset %s amount %f", asset, amount.Float64()) + resp, err := req.Do(ctx) + if err != nil { + return err } - return req.Do(ctx) + log.Debugf("margin repayed %f %s, transaction id = %d", amount.Float64(), asset, resp.TranID) + return err } -func (e *Exchange) QueryWithdrawHistory(ctx context.Context, asset string, since, until time.Time) (allWithdraws []types.Withdraw, err error) { +func (e *Exchange) BorrowMarginAsset(ctx context.Context, asset string, amount fixedpoint.Value) error { + req := e.client.NewMarginLoanService() + req.Asset(asset) + req.Amount(amount.String()) + if e.IsIsolatedMargin { + req.IsolatedSymbol(e.IsolatedMarginSymbol) + } - startTime := since - txIDs := map[string]struct{}{} + log.Infof("borrowing margin asset %s amount %f", asset, amount.Float64()) + resp, err := req.Do(ctx) + if err != nil { + return err + } + log.Debugf("margin borrowed %f %s, transaction id = %d", amount.Float64(), asset, resp.TranID) + return err +} - for startTime.Before(until) { - // startTime ~ endTime must be in 90 days - endTime := startTime.AddDate(0, 0, 60) - if endTime.After(until) { - endTime = until - } +func (e *Exchange) QueryMarginBorrowHistory(ctx context.Context, asset string) error { + req := e.client.NewListMarginLoansService() + req.Asset(asset) + history, err := req.Do(ctx) + if err != nil { + return err + } + _ = history + return nil +} - req := e.Client.NewListWithdrawsService() - if len(asset) > 0 { - req.Asset(asset) - } +// transferCrossMarginAccountAsset transfer asset to the cross margin account or to the main account +func (e *Exchange) transferCrossMarginAccountAsset(ctx context.Context, asset string, amount fixedpoint.Value, io int) error { + req := e.client.NewMarginTransferService() + req.Asset(asset) + req.Amount(amount.String()) - withdraws, err := req. - StartTime(startTime.UnixNano() / int64(time.Millisecond)). - EndTime(endTime.UnixNano() / int64(time.Millisecond)). 
- Do(ctx) + if io > 0 { // in + req.Type(binance.MarginTransferTypeToMargin) + } else if io < 0 { // out + req.Type(binance.MarginTransferTypeToMain) + } + resp, err := req.Do(ctx) + if err != nil { + return err + } - if err != nil { - return allWithdraws, err - } + log.Debugf("cross margin transfer %f %s, transaction id = %d", amount.Float64(), asset, resp.TranID) + return err +} - for _, d := range withdraws { - if _, ok := txIDs[d.TxID]; ok { - continue - } +func (e *Exchange) queryCrossMarginAccount(ctx context.Context) (*types.Account, error) { + marginAccount, err := e.client.NewGetMarginAccountService().Do(ctx) + if err != nil { + return nil, err + } - status := "" - switch d.Status { - case 0: - status = "email_sent" - case 1: - status = "cancelled" - case 2: - status = "awaiting_approval" - case 3: - status = "rejected" - case 4: - status = "processing" - case 5: - status = "failure" - case 6: - status = "completed" - - default: - status = fmt.Sprintf("unsupported code: %d", d.Status) - } + marginLevel := fixedpoint.MustNewFromString(marginAccount.MarginLevel) + a := &types.Account{ + AccountType: types.AccountTypeMargin, + MarginInfo: toGlobalMarginAccountInfo(marginAccount), // In binance GO api, Account define marginAccount info which mantain []*AccountAsset and []*AccountPosition. + MarginLevel: marginLevel, + MarginTolerance: calculateMarginTolerance(marginLevel), + BorrowEnabled: marginAccount.BorrowEnabled, + TransferEnabled: marginAccount.TransferEnabled, + } - txIDs[d.TxID] = struct{}{} - allWithdraws = append(allWithdraws, types.Withdraw{ - ApplyTime: time.Unix(0, d.ApplyTime*int64(time.Millisecond)), - Asset: d.Asset, - Amount: d.Amount, - Address: d.Address, - AddressTag: d.AddressTag, - TransactionID: d.TxID, - TransactionFee: d.TransactionFee, - WithdrawOrderID: d.WithdrawOrderID, - Network: d.Network, - Status: status, - }) + // convert cross margin user assets into balances + balances := types.BalanceMap{} + for _, userAsset := range marginAccount.UserAssets { + balances[userAsset.Asset] = types.Balance{ + Currency: userAsset.Asset, + Available: fixedpoint.MustNewFromString(userAsset.Free), + Locked: fixedpoint.MustNewFromString(userAsset.Locked), + Interest: fixedpoint.MustNewFromString(userAsset.Interest), + Borrowed: fixedpoint.MustNewFromString(userAsset.Borrowed), + NetAsset: fixedpoint.MustNewFromString(userAsset.NetAsset), } + } + a.UpdateBalances(balances) + return a, nil +} + +func (e *Exchange) queryIsolatedMarginAccount(ctx context.Context) (*types.Account, error) { + req := e.client.NewGetIsolatedMarginAccountService() + req.Symbols(e.IsolatedMarginSymbol) + + marginAccount, err := req.Do(ctx) + if err != nil { + return nil, err + } + + a := &types.Account{ + AccountType: types.AccountTypeIsolatedMargin, + IsolatedMarginInfo: toGlobalIsolatedMarginAccountInfo(marginAccount), // In binance GO api, Account define marginAccount info which mantain []*AccountAsset and []*AccountPosition. + } + + if len(marginAccount.Assets) == 0 { + return nil, fmt.Errorf("empty margin account assets, please check your isolatedMarginSymbol is correctly set: %+v", marginAccount) + } + + // for isolated margin account, we will only have one asset in the Assets array. 
+ if len(marginAccount.Assets) > 1 { + return nil, fmt.Errorf("unexpected number of user assets returned, got %d user assets", len(marginAccount.Assets)) + } - startTime = endTime + userAsset := marginAccount.Assets[0] + marginLevel := fixedpoint.MustNewFromString(userAsset.MarginLevel) + a.MarginLevel = marginLevel + a.MarginTolerance = calculateMarginTolerance(marginLevel) + a.MarginRatio = fixedpoint.MustNewFromString(userAsset.MarginRatio) + a.BorrowEnabled = userAsset.BaseAsset.BorrowEnabled || userAsset.QuoteAsset.BorrowEnabled + a.LiquidationPrice = fixedpoint.MustNewFromString(userAsset.LiquidatePrice) + a.LiquidationRate = fixedpoint.MustNewFromString(userAsset.LiquidateRate) + + // Convert user assets into balances + balances := types.BalanceMap{} + balances[userAsset.BaseAsset.Asset] = types.Balance{ + Currency: userAsset.BaseAsset.Asset, + Available: fixedpoint.MustNewFromString(userAsset.BaseAsset.Free), + Locked: fixedpoint.MustNewFromString(userAsset.BaseAsset.Locked), + Interest: fixedpoint.MustNewFromString(userAsset.BaseAsset.Interest), + Borrowed: fixedpoint.MustNewFromString(userAsset.BaseAsset.Borrowed), + NetAsset: fixedpoint.MustNewFromString(userAsset.BaseAsset.NetAsset), } - return allWithdraws, nil + balances[userAsset.QuoteAsset.Asset] = types.Balance{ + Currency: userAsset.QuoteAsset.Asset, + Available: fixedpoint.MustNewFromString(userAsset.QuoteAsset.Free), + Locked: fixedpoint.MustNewFromString(userAsset.QuoteAsset.Locked), + Interest: fixedpoint.MustNewFromString(userAsset.QuoteAsset.Interest), + Borrowed: fixedpoint.MustNewFromString(userAsset.QuoteAsset.Borrowed), + NetAsset: fixedpoint.MustNewFromString(userAsset.QuoteAsset.NetAsset), + } + + a.UpdateBalances(balances) + return a, nil } -func (e *Exchange) QueryDepositHistory(ctx context.Context, asset string, since, until time.Time) (allDeposits []types.Deposit, err error) { - startTime := since - txIDs := map[string]struct{}{} - for startTime.Before(until) { +func (e *Exchange) Withdraw(ctx context.Context, asset string, amount fixedpoint.Value, address string, options *types.WithdrawalOptions) error { + req := e.client2.NewWithdrawRequest() + req.Coin(asset) + req.Address(address) + req.Amount(fmt.Sprintf("%f", amount.Float64())) - // startTime ~ endTime must be in 90 days - endTime := startTime.AddDate(0, 0, 60) - if endTime.After(until) { - endTime = until + if options != nil { + if options.Network != "" { + req.Network(options.Network) } + if options.AddressTag != "" { + req.Network(options.AddressTag) + } + } - req := e.Client.NewListDepositsService() - if len(asset) > 0 { - req.Asset(asset) + response, err := req.Do(ctx) + if err != nil { + return err + } + + log.Infof("withdrawal request sent, response: %+v", response) + return nil +} + +func (e *Exchange) QueryWithdrawHistory(ctx context.Context, asset string, since, until time.Time) (withdraws []types.Withdraw, err error) { + var emptyTime = time.Time{} + if since == emptyTime { + since, err = getLaunchDate() + if err != nil { + return withdraws, err } + } + + // startTime ~ endTime must be in 90 days + historyDayRangeLimit := time.Hour * 24 * 89 + if until.Sub(since) >= historyDayRangeLimit { + until = since.Add(historyDayRangeLimit) + } - deposits, err := req. - StartTime(startTime.UnixNano() / int64(time.Millisecond)). - EndTime(endTime.UnixNano() / int64(time.Millisecond)). - Do(ctx) + req := e.client2.NewGetWithdrawHistoryRequest() + if len(asset) > 0 { + req.Coin(asset) + } + + records, err := req. + StartTime(since). + EndTime(until). 
+ Limit(1000). + Do(ctx) + + if err != nil { + return withdraws, err + } + for _, d := range records { + // time format: 2006-01-02 15:04:05 + applyTime, err := time.Parse("2006-01-02 15:04:05", d.ApplyTime) if err != nil { return nil, err } - for _, d := range deposits { - if _, ok := txIDs[d.TxID]; ok { - continue - } + withdraws = append(withdraws, types.Withdraw{ + Exchange: types.ExchangeBinance, + ApplyTime: types.Time(applyTime), + Asset: d.Coin, + Amount: d.Amount, + Address: d.Address, + TransactionID: d.TxID, + TransactionFee: d.TransactionFee, + WithdrawOrderID: d.WithdrawOrderID, + Network: d.Network, + Status: d.Status.String(), + }) + } - // 0(0:pending,6: credited but cannot withdraw, 1:success) - status := types.DepositStatus(fmt.Sprintf("code: %d", d.Status)) - - switch d.Status { - case 0: - status = types.DepositPending - case 6: - // https://www.binance.com/en/support/faq/115003736451 - status = types.DepositCredited - case 1: - status = types.DepositSuccess - } + return withdraws, nil +} - txIDs[d.TxID] = struct{}{} - allDeposits = append(allDeposits, types.Deposit{ - Time: time.Unix(0, d.InsertTime*int64(time.Millisecond)), - Asset: d.Asset, - Amount: d.Amount, - Address: d.Address, - AddressTag: d.AddressTag, - TransactionID: d.TxID, - Status: status, - }) +func (e *Exchange) QueryDepositHistory(ctx context.Context, asset string, since, until time.Time) (allDeposits []types.Deposit, err error) { + var emptyTime = time.Time{} + if since == emptyTime { + since, err = getLaunchDate() + if err != nil { + return nil, err } + } + + // startTime ~ endTime must be in 90 days + historyDayRangeLimit := time.Hour * 24 * 89 + if until.Sub(since) >= historyDayRangeLimit { + until = since.Add(historyDayRangeLimit) + } - startTime = endTime + req := e.client2.NewGetDepositHistoryRequest() + if len(asset) > 0 { + req.Coin(asset) + } + + req.StartTime(since). 
+ EndTime(until) + + records, err := req.Do(ctx) + if err != nil { + return nil, err + } + + for _, d := range records { + // 0(0:pending,6: credited but cannot withdraw, 1:success) + // set the default status + status := types.DepositStatus(fmt.Sprintf("code: %d", d.Status)) + switch d.Status { + case 0: + status = types.DepositPending + case 6: + // https://www.binance.com/en/support/faq/115003736451 + status = types.DepositCredited + case 1: + status = types.DepositSuccess + } + + allDeposits = append(allDeposits, types.Deposit{ + Exchange: types.ExchangeBinance, + Time: types.Time(d.InsertTime.Time()), + Asset: d.Coin, + Amount: d.Amount, + Address: d.Address, + AddressTag: d.AddressTag, + TransactionID: d.TxId, + Status: status, + }) } return allDeposits, nil @@ -266,13 +589,12 @@ func (e *Exchange) QueryAccountBalances(ctx context.Context) (types.BalanceMap, return account.Balances(), nil } -// PlatformFeeCurrency func (e *Exchange) PlatformFeeCurrency() string { - return "BNB" + return BNB } -func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { - account, err := e.Client.NewGetAccountService().Do(ctx) +func (e *Exchange) QuerySpotAccount(ctx context.Context) (*types.Account, error) { + account, err := e.client.NewGetAccountService().Do(ctx) if err != nil { return nil, err } @@ -281,22 +603,69 @@ func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { for _, b := range account.Balances { balances[b.Asset] = types.Balance{ Currency: b.Asset, - Available: fixedpoint.Must(fixedpoint.NewFromString(b.Free)), - Locked: fixedpoint.Must(fixedpoint.NewFromString(b.Locked)), + Available: fixedpoint.MustNewFromString(b.Free), + Locked: fixedpoint.MustNewFromString(b.Locked), } } a := &types.Account{ - MakerCommission: int(account.MakerCommission), - TakerCommission: int(account.TakerCommission), + AccountType: types.AccountTypeSpot, + CanDeposit: account.CanDeposit, // if can transfer in asset + CanTrade: account.CanTrade, // if can trade + CanWithdraw: account.CanWithdraw, // if can transfer out asset } a.UpdateBalances(balances) return a, nil } +func (e *Exchange) QueryFuturesAccount(ctx context.Context) (*types.Account, error) { + account, err := e.futuresClient.NewGetAccountService().Do(ctx) + if err != nil { + return nil, err + } + accountBalances, err := e.futuresClient.NewGetBalanceService().Do(ctx) + if err != nil { + return nil, err + } + + var balances = map[string]types.Balance{} + for _, b := range accountBalances { + balances[b.Asset] = types.Balance{ + Currency: b.Asset, + Available: fixedpoint.Must(fixedpoint.NewFromString(b.AvailableBalance)), + } + } + + a := &types.Account{ + AccountType: types.AccountTypeFutures, + FuturesInfo: toGlobalFuturesAccountInfo(account), // In binance GO api, Account define account info which mantain []*AccountAsset and []*AccountPosition. 
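// Illustrative sketch, not part of the surrounding change: QueryWithdrawHistory
// and QueryDepositHistory above clamp (since, until) to at most 89 days because
// the underlying Binance endpoints only accept roughly 90-day windows. A caller
// that needs a longer history could walk the range window by window, roughly
// like this. splitRange is a hypothetical helper, not part of the package.
package main

import (
	"fmt"
	"time"
)

// splitRange cuts [since, until) into consecutive windows no longer than max.
func splitRange(since, until time.Time, max time.Duration) [][2]time.Time {
	var windows [][2]time.Time
	for since.Before(until) {
		end := since.Add(max)
		if end.After(until) {
			end = until
		}
		windows = append(windows, [2]time.Time{since, end})
		since = end
	}
	return windows
}

func main() {
	since := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
	until := time.Date(2021, 7, 1, 0, 0, 0, 0, time.UTC)
	for _, w := range splitRange(since, until, 89*24*time.Hour) {
		// each window would be passed to QueryWithdrawHistory / QueryDepositHistory
		fmt.Println(w[0].Format("2006-01-02"), "->", w[1].Format("2006-01-02"))
	}
}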
+ CanDeposit: account.CanDeposit, // if can transfer in asset + CanTrade: account.CanTrade, // if can trade + CanWithdraw: account.CanWithdraw, // if can transfer out asset + } + a.UpdateBalances(balances) + return a, nil +} + +func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { + var account *types.Account + var err error + if e.IsFutures { + account, err = e.QueryFuturesAccount(ctx) + } else if e.IsIsolatedMargin { + account, err = e.queryIsolatedMarginAccount(ctx) + } else if e.IsMargin { + account, err = e.queryCrossMarginAccount(ctx) + } else { + account, err = e.QuerySpotAccount(ctx) + } + + return account, err +} + func (e *Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders []types.Order, err error) { if e.IsMargin { - req := e.Client.NewListMarginOpenOrdersService().Symbol(symbol) + req := e.client.NewListMarginOpenOrdersService().Symbol(symbol) req.IsIsolated(e.IsIsolatedMargin) binanceOrders, err := req.Do(ctx) @@ -304,33 +673,75 @@ func (e *Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders [ return orders, err } - return ToGlobalOrders(binanceOrders) + return toGlobalOrders(binanceOrders) } - binanceOrders, err := e.Client.NewListOpenOrdersService().Symbol(symbol).Do(ctx) + if e.IsFutures { + req := e.futuresClient.NewListOpenOrdersService().Symbol(symbol) + + binanceOrders, err := req.Do(ctx) + if err != nil { + return orders, err + } + + return toGlobalFuturesOrders(binanceOrders) + } + + binanceOrders, err := e.client.NewListOpenOrdersService().Symbol(symbol).Do(ctx) if err != nil { return orders, err } - return ToGlobalOrders(binanceOrders) + return toGlobalOrders(binanceOrders) +} + +func (e *Exchange) QueryOrder(ctx context.Context, q types.OrderQuery) (*types.Order, error) { + orderID, err := strconv.ParseInt(q.OrderID, 10, 64) + if err != nil { + return nil, err + } + + var order *binance.Order + if e.IsMargin { + order, err = e.client.NewGetMarginOrderService().Symbol(q.Symbol).OrderID(orderID).Do(ctx) + } else { + order, err = e.client.NewGetOrderService().Symbol(q.Symbol).OrderID(orderID).Do(ctx) + } + + if err != nil { + return nil, err + } + + return toGlobalOrder(order, e.IsMargin) } func (e *Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) (orders []types.Order, err error) { - if until.Sub(since) >= 24*time.Hour { - until = since.Add(24*time.Hour - time.Millisecond) + // we can only query orders within 24 hours + // if the until-since is more than 24 hours, we should reset the until to: + // new until = since + 24 hours - 1 millisecond + /* + if until.Sub(since) >= 24*time.Hour { + until = since.Add(24*time.Hour - time.Millisecond) + } + */ + + if err := orderLimiter.Wait(ctx); err != nil { + log.WithError(err).Errorf("order rate limiter wait error") } log.Infof("querying closed orders %s from %s <=> %s ...", symbol, since, until) if e.IsMargin { - req := e.Client.NewListMarginOrdersService().Symbol(symbol) + req := e.client.NewListMarginOrdersService().Symbol(symbol) req.IsIsolated(e.IsIsolatedMargin) if lastOrderID > 0 { req.OrderID(int64(lastOrderID)) } else { - req.StartTime(since.UnixNano() / int64(time.Millisecond)). 
- EndTime(until.UnixNano() / int64(time.Millisecond)) + req.StartTime(since.UnixNano() / int64(time.Millisecond)) + if until.Sub(since) < 24*time.Hour { + req.EndTime(until.UnixNano() / int64(time.Millisecond)) + } } binanceOrders, err := req.Do(ctx) @@ -338,48 +749,129 @@ func (e *Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, return orders, err } - return ToGlobalOrders(binanceOrders) + return toGlobalOrders(binanceOrders) + } + + if e.IsFutures { + req := e.futuresClient.NewListOrdersService().Symbol(symbol) + + if lastOrderID > 0 { + req.OrderID(int64(lastOrderID)) + } else { + req.StartTime(since.UnixNano() / int64(time.Millisecond)) + if until.Sub(since) < 24*time.Hour { + req.EndTime(until.UnixNano() / int64(time.Millisecond)) + } + } + + binanceOrders, err := req.Do(ctx) + if err != nil { + return orders, err + } + return toGlobalFuturesOrders(binanceOrders) } - req := e.Client.NewListOrdersService(). + // If orderId is set, it will get orders >= that orderId. Otherwise most recent orders are returned. + // For some historical orders cummulativeQuoteQty will be < 0, meaning the data is not available at this time. + // If startTime and/or endTime provided, orderId is not required. + req := e.client.NewListOrdersService(). Symbol(symbol) if lastOrderID > 0 { req.OrderID(int64(lastOrderID)) } else { - req.StartTime(since.UnixNano() / int64(time.Millisecond)). - EndTime(until.UnixNano() / int64(time.Millisecond)) + req.StartTime(since.UnixNano() / int64(time.Millisecond)) + if until.Sub(since) < 24*time.Hour { + req.EndTime(until.UnixNano() / int64(time.Millisecond)) + } } + // default 500, max 1000 + req.Limit(1000) + binanceOrders, err := req.Do(ctx) if err != nil { return orders, err } - return ToGlobalOrders(binanceOrders) + return toGlobalOrders(binanceOrders) } -func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) (err2 error) { - for _, o := range orders { - var req = e.Client.NewCancelOrderService() +func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) (err error) { + if err := orderLimiter.Wait(ctx); err != nil { + log.WithError(err).Errorf("order rate limiter wait error") + } + + if e.IsFutures { + for _, o := range orders { + var req = e.futuresClient.NewCancelOrderService() - // Mandatory - req.Symbol(o.Symbol) + // Mandatory + req.Symbol(o.Symbol) - if o.OrderID > 0 { - req.OrderID(int64(o.OrderID)) - } else if len(o.ClientOrderID) > 0 { - req.NewClientOrderID(o.ClientOrderID) + if o.OrderID > 0 { + req.OrderID(int64(o.OrderID)) + } else { + err = multierr.Append(err, types.NewOrderError( + fmt.Errorf("can not cancel %s order, order does not contain orderID or clientOrderID", o.Symbol), + o)) + continue + } + + _, err2 := req.Do(ctx) + if err2 != nil { + err = multierr.Append(err, types.NewOrderError(err2, o)) + } } - _, err := req.Do(ctx) - if err != nil { - log.WithError(err).Errorf("order cancel error") - err2 = err + return err + } + + for _, o := range orders { + if e.IsMargin { + var req = e.client.NewCancelMarginOrderService() + req.IsIsolated(e.IsIsolatedMargin) + req.Symbol(o.Symbol) + + if o.OrderID > 0 { + req.OrderID(int64(o.OrderID)) + } else if len(o.ClientOrderID) > 0 { + req.OrigClientOrderID(o.ClientOrderID) + } else { + err = multierr.Append(err, types.NewOrderError( + fmt.Errorf("can not cancel %s order, order does not contain orderID or clientOrderID", o.Symbol), + o)) + continue + } + + _, err2 := req.Do(ctx) + if err2 != nil { + err = multierr.Append(err, types.NewOrderError(err2, 
o)) + } + } else { + // SPOT + var req = e.client.NewCancelOrderService() + req.Symbol(o.Symbol) + + if o.OrderID > 0 { + req.OrderID(int64(o.OrderID)) + } else if len(o.ClientOrderID) > 0 { + req.OrigClientOrderID(o.ClientOrderID) + } else { + err = multierr.Append(err, types.NewOrderError( + fmt.Errorf("can not cancel %s order, order does not contain orderID or clientOrderID", o.Symbol), + o)) + continue + } + + _, err2 := req.Do(ctx) + if err2 != nil { + err = multierr.Append(err, types.NewOrderError(err2, o)) + } } } - return err2 + return err } func (e *Exchange) submitMarginOrder(ctx context.Context, order types.SubmitOrder) (*types.Order, error) { @@ -388,16 +880,15 @@ func (e *Exchange) submitMarginOrder(ctx context.Context, order types.SubmitOrde return nil, err } - clientOrderID := uuid.New().String() - if len(order.ClientOrderID) > 0 { - clientOrderID = order.ClientOrderID - } - - req := e.Client.NewCreateMarginOrderService(). + req := e.client.NewCreateMarginOrderService(). Symbol(order.Symbol). Type(orderType). - Side(binance.SideType(order.Side)). - NewClientOrderID(clientOrderID) + Side(binance.SideType(order.Side)) + + clientOrderID := newSpotClientOrderID(order.ClientOrderID) + if len(clientOrderID) > 0 { + req.NewClientOrderID(clientOrderID) + } // use response result format req.NewOrderRespType(binance.NewOrderRespTypeRESULT) @@ -410,35 +901,45 @@ func (e *Exchange) submitMarginOrder(ctx context.Context, order types.SubmitOrde req.SideEffectType(binance.SideEffectType(order.MarginSideEffect)) } - if len(order.QuantityString) > 0 { - req.Quantity(order.QuantityString) - } else if order.Market.Symbol != "" { + if order.Market.Symbol != "" { req.Quantity(order.Market.FormatQuantity(order.Quantity)) } else { - req.Quantity(strconv.FormatFloat(order.Quantity, 'f', 8, 64)) + // TODO report error + req.Quantity(order.Quantity.FormatString(8)) } - if len(order.PriceString) > 0 { - req.Price(order.PriceString) - } else if order.Market.Symbol != "" { - req.Price(order.Market.FormatPrice(order.Price)) - } else { - req.Price(strconv.FormatFloat(order.Price, 'f', 8, 64)) + // set price field for limit orders + switch order.Type { + case types.OrderTypeStopLimit, types.OrderTypeLimit, types.OrderTypeLimitMaker: + if order.Market.Symbol != "" { + req.Price(order.Market.FormatPrice(order.Price)) + } else { + // TODO report error + req.Price(order.Price.FormatString(8)) + } } + // set stop price switch order.Type { + case types.OrderTypeStopLimit, types.OrderTypeStopMarket: - if len(order.StopPriceString) == 0 { - return nil, fmt.Errorf("stop price string can not be empty") + if order.Market.Symbol != "" { + req.StopPrice(order.Market.FormatPrice(order.StopPrice)) + } else { + // TODO report error + req.StopPrice(order.StopPrice.FormatString(8)) } - - req.StopPrice(order.StopPriceString) } // could be IOC or FOK if len(order.TimeInForce) > 0 { // TODO: check the TimeInForce value req.TimeInForce(binance.TimeInForceType(order.TimeInForce)) + } else { + switch order.Type { + case types.OrderTypeLimit, types.OrderTypeStopLimit: + req.TimeInForce(binance.TimeInForceTypeGTC) + } } response, err := req.Do(ctx) @@ -448,7 +949,7 @@ func (e *Exchange) submitMarginOrder(ctx context.Context, order types.SubmitOrde log.Infof("margin order creation response: %+v", response) - createdOrder, err := ToGlobalOrder(&binance.Order{ + createdOrder, err := toGlobalOrder(&binance.Order{ Symbol: response.Symbol, OrderID: response.OrderID, ClientOrderID: response.ClientOrderID, @@ -468,51 +969,215 @@ func (e 
*Exchange) submitMarginOrder(ctx context.Context, order types.SubmitOrde return createdOrder, err } -func (e *Exchange) submitSpotOrder(ctx context.Context, order types.SubmitOrder) (*types.Order, error) { - orderType, err := toLocalOrderType(order.Type) +func (e *Exchange) submitFuturesOrder(ctx context.Context, order types.SubmitOrder) (*types.Order, error) { + orderType, err := toLocalFuturesOrderType(order.Type) + if err != nil { + return nil, err + } + + req := e.futuresClient.NewCreateOrderService(). + Symbol(order.Symbol). + Type(orderType). + Side(futures.SideType(order.Side)). + ReduceOnly(order.ReduceOnly) + + clientOrderID := newFuturesClientOrderID(order.ClientOrderID) + if len(clientOrderID) > 0 { + req.NewClientOrderID(clientOrderID) + } + + // use response result format + req.NewOrderResponseType(futures.NewOrderRespTypeRESULT) + + if order.Market.Symbol != "" { + req.Quantity(order.Market.FormatQuantity(order.Quantity)) + } else { + // TODO report error + req.Quantity(order.Quantity.FormatString(8)) + } + + // set price field for limit orders + switch order.Type { + case types.OrderTypeStopLimit, types.OrderTypeLimit, types.OrderTypeLimitMaker: + if order.Market.Symbol != "" { + req.Price(order.Market.FormatPrice(order.Price)) + } else { + // TODO report error + req.Price(order.Price.FormatString(8)) + } + } + + // set stop price + switch order.Type { + + case types.OrderTypeStopLimit, types.OrderTypeStopMarket: + if order.Market.Symbol != "" { + req.StopPrice(order.Market.FormatPrice(order.StopPrice)) + } else { + // TODO report error + req.StopPrice(order.StopPrice.FormatString(8)) + } + } + + // could be IOC or FOK + if len(order.TimeInForce) > 0 { + // TODO: check the TimeInForce value + req.TimeInForce(futures.TimeInForceType(order.TimeInForce)) + } else { + switch order.Type { + case types.OrderTypeLimit, types.OrderTypeStopLimit: + req.TimeInForce(futures.TimeInForceTypeGTC) + } + } + + response, err := req.Do(ctx) if err != nil { return nil, err } - clientOrderID := uuid.New().String() - if len(order.ClientOrderID) > 0 { - clientOrderID = order.ClientOrderID + log.Infof("futures order creation response: %+v", response) + + createdOrder, err := toGlobalFuturesOrder(&futures.Order{ + Symbol: response.Symbol, + OrderID: response.OrderID, + ClientOrderID: response.ClientOrderID, + Price: response.Price, + OrigQuantity: response.OrigQuantity, + ExecutedQuantity: response.ExecutedQuantity, + Status: response.Status, + TimeInForce: response.TimeInForce, + Type: response.Type, + Side: response.Side, + ReduceOnly: response.ReduceOnly, + }, true) + + return createdOrder, err +} + +// BBGO is a broker on Binance +const spotBrokerID = "NSUYEBKM" + +func newSpotClientOrderID(originalID string) (clientOrderID string) { + if originalID == types.NoClientOrderID { + return "" + } + + prefix := "x-" + spotBrokerID + prefixLen := len(prefix) + + if originalID != "" { + // try to keep the whole original client order ID if user specifies it. 
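	// If the prefixed ID would exceed 32 characters, the user-provided ID is
	// passed through unchanged; otherwise the broker prefix is prepended.
	// A brief sketch of the resulting IDs, matching the expectations in
	// exchange_test.go further below:
	//
	//	newSpotClientOrderID("")      // random UUID prefixed with "x-" + spotBrokerID, trimmed to 32 chars
	//	newSpotClientOrderID("myid1") // "x-" + spotBrokerID + "myid1"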
+ if prefixLen+len(originalID) > 32 { + return originalID + } + + clientOrderID = prefix + originalID + return clientOrderID + } + + clientOrderID = uuid.New().String() + clientOrderID = prefix + clientOrderID + if len(clientOrderID) > 32 { + return clientOrderID[0:32] + } + + return clientOrderID +} + +// BBGO is a futures broker on Binance +const futuresBrokerID = "gBhMvywy" + +func newFuturesClientOrderID(originalID string) (clientOrderID string) { + if originalID == types.NoClientOrderID { + return "" + } + + prefix := "x-" + futuresBrokerID + prefixLen := len(prefix) + + if originalID != "" { + // try to keep the whole original client order ID if user specifies it. + if prefixLen+len(originalID) > 32 { + return originalID + } + + clientOrderID = prefix + originalID + return clientOrderID + } + + clientOrderID = uuid.New().String() + clientOrderID = prefix + clientOrderID + if len(clientOrderID) > 32 { + return clientOrderID[0:32] + } + + return clientOrderID +} + +func (e *Exchange) submitSpotOrder(ctx context.Context, order types.SubmitOrder) (*types.Order, error) { + orderType, err := toLocalOrderType(order.Type) + if err != nil { + return nil, err } - req := e.Client.NewCreateOrderService(). + req := e.client.NewCreateOrderService(). Symbol(order.Symbol). Side(binance.SideType(order.Side)). - NewClientOrderID(clientOrderID). Type(orderType) - req.Quantity(order.QuantityString) + clientOrderID := newSpotClientOrderID(order.ClientOrderID) + if len(clientOrderID) > 0 { + req.NewClientOrderID(clientOrderID) + } - if len(order.PriceString) > 0 { - req.Price(order.PriceString) + if order.Market.Symbol != "" { + req.Quantity(order.Market.FormatQuantity(order.Quantity)) + } else { + // TODO: report error + req.Quantity(order.Quantity.FormatString(8)) } + // set price field for limit orders switch order.Type { - case types.OrderTypeStopLimit, types.OrderTypeStopMarket: - if len(order.StopPriceString) == 0 { - return nil, fmt.Errorf("stop price string can not be empty") + case types.OrderTypeStopLimit, types.OrderTypeLimit, types.OrderTypeLimitMaker: + if order.Market.Symbol != "" { + req.Price(order.Market.FormatPrice(order.Price)) + } else { + // TODO: report error + req.Price(order.Price.FormatString(8)) } + } - req.StopPrice(order.StopPriceString) + switch order.Type { + case types.OrderTypeStopLimit, types.OrderTypeStopMarket: + if order.Market.Symbol != "" { + req.StopPrice(order.Market.FormatPrice(order.StopPrice)) + } else { + // TODO: report error + req.StopPrice(order.StopPrice.FormatString(8)) + } } if len(order.TimeInForce) > 0 { // TODO: check the TimeInForce value req.TimeInForce(binance.TimeInForceType(order.TimeInForce)) + } else { + switch order.Type { + case types.OrderTypeLimit, types.OrderTypeStopLimit: + req.TimeInForce(binance.TimeInForceTypeGTC) + } } + req.NewOrderRespType(binance.NewOrderRespTypeRESULT) + response, err := req.Do(ctx) if err != nil { return nil, err } - log.Infof("order creation response: %+v", response) + log.Infof("spot order creation response: %+v", response) - createdOrder, err := ToGlobalOrder(&binance.Order{ + createdOrder, err := toGlobalOrder(&binance.Order{ Symbol: response.Symbol, OrderID: response.OrderID, ClientOrderID: response.ClientOrderID, @@ -527,10 +1192,6 @@ func (e *Exchange) submitSpotOrder(ctx context.Context, order types.SubmitOrder) UpdateTime: response.TransactTime, Time: response.TransactTime, IsIsolated: response.IsIsolated, - // StopPrice: - // IcebergQuantity: - // UpdateTime: - // IsWorking: , }, false) return 
createdOrder, err @@ -538,10 +1199,15 @@ func (e *Exchange) submitSpotOrder(ctx context.Context, order types.SubmitOrder) func (e *Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (createdOrders types.OrderSlice, err error) { for _, order := range orders { - var createdOrder *types.Order + if err := orderLimiter.Wait(ctx); err != nil { + log.WithError(err).Errorf("order rate limiter wait error") + } + var createdOrder *types.Order if e.IsMargin { createdOrder, err = e.submitMarginOrder(ctx, order) + } else if e.IsFutures { + createdOrder, err = e.submitFuturesOrder(ctx, order) } else { createdOrder, err = e.submitSpotOrder(ctx, order) } @@ -561,27 +1227,39 @@ func (e *Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder } // QueryKLines queries the Kline/candlestick bars for a symbol. Klines are uniquely identified by their open time. +// Binance uses inclusive start time query range, eg: +// https://api.binance.com/api/v3/klines?symbol=BTCUSDT&interval=1m&startTime=1620172860000 +// the above query will return a kline with startTime = 1620172860000 +// and, +// https://api.binance.com/api/v3/klines?symbol=BTCUSDT&interval=1m&startTime=1620172860000&endTime=1620172920000 +// the above query will return a kline with startTime = 1620172860000, and a kline with endTime = 1620172860000 +// +// the endTime of a binance kline, is the (startTime + interval time - 1 millisecond), e.g., +// millisecond unix timestamp: 1620172860000 and 1620172919999 func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { + if e.IsFutures { + return e.QueryFuturesKLines(ctx, symbol, interval, options) + } - var limit = 500 + var limit = 1000 if options.Limit > 0 { - // default limit == 500 + // default limit == 1000 limit = options.Limit } log.Infof("querying kline %s %s %v", symbol, interval, options) - req := e.Client.NewKlinesService(). + req := e.client.NewKlinesService(). Symbol(symbol). Interval(string(interval)). 
Limit(limit) if options.StartTime != nil { - req.StartTime(options.StartTime.UnixNano() / int64(time.Millisecond)) + req.StartTime(options.StartTime.UnixMilli()) } if options.EndTime != nil { - req.EndTime(options.EndTime.UnixNano() / int64(time.Millisecond)) + req.EndTime(options.EndTime.UnixMilli()) } resp, err := req.Do(ctx) @@ -592,112 +1270,367 @@ func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval type var kLines []types.KLine for _, k := range resp { kLines = append(kLines, types.KLine{ - Exchange: "binance", - Symbol: symbol, - Interval: interval, - StartTime: time.Unix(0, k.OpenTime*int64(time.Millisecond)), - EndTime: time.Unix(0, k.CloseTime*int64(time.Millisecond)), - Open: util.MustParseFloat(k.Open), - Close: util.MustParseFloat(k.Close), - High: util.MustParseFloat(k.High), - Low: util.MustParseFloat(k.Low), - Volume: util.MustParseFloat(k.Volume), - QuoteVolume: util.MustParseFloat(k.QuoteAssetVolume), - LastTradeID: 0, - NumberOfTrades: uint64(k.TradeNum), - Closed: true, + Exchange: types.ExchangeBinance, + Symbol: symbol, + Interval: interval, + StartTime: types.NewTimeFromUnix(0, k.OpenTime*int64(time.Millisecond)), + EndTime: types.NewTimeFromUnix(0, k.CloseTime*int64(time.Millisecond)), + Open: fixedpoint.MustNewFromString(k.Open), + Close: fixedpoint.MustNewFromString(k.Close), + High: fixedpoint.MustNewFromString(k.High), + Low: fixedpoint.MustNewFromString(k.Low), + Volume: fixedpoint.MustNewFromString(k.Volume), + QuoteVolume: fixedpoint.MustNewFromString(k.QuoteAssetVolume), + TakerBuyBaseAssetVolume: fixedpoint.MustNewFromString(k.TakerBuyBaseAssetVolume), + TakerBuyQuoteAssetVolume: fixedpoint.MustNewFromString(k.TakerBuyQuoteAssetVolume), + LastTradeID: 0, + NumberOfTrades: uint64(k.TradeNum), + Closed: true, }) } + + kLines = types.SortKLinesAscending(kLines) return kLines, nil } -func (e *Exchange) QueryTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) (trades []types.Trade, err error) { +func (e *Exchange) QueryFuturesKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { + + var limit = 1000 + if options.Limit > 0 { + // default limit == 1000 + limit = options.Limit + } + + log.Infof("querying kline %s %s %v", symbol, interval, options) + + req := e.futuresClient.NewKlinesService(). + Symbol(symbol). + Interval(string(interval)). 
+ Limit(limit) + + if options.StartTime != nil { + req.StartTime(options.StartTime.UnixMilli()) + } + + if options.EndTime != nil { + req.EndTime(options.EndTime.UnixMilli()) + } + + resp, err := req.Do(ctx) + if err != nil { + return nil, err + } + + var kLines []types.KLine + for _, k := range resp { + kLines = append(kLines, types.KLine{ + Exchange: types.ExchangeBinance, + Symbol: symbol, + Interval: interval, + StartTime: types.NewTimeFromUnix(0, k.OpenTime*int64(time.Millisecond)), + EndTime: types.NewTimeFromUnix(0, k.CloseTime*int64(time.Millisecond)), + Open: fixedpoint.MustNewFromString(k.Open), + Close: fixedpoint.MustNewFromString(k.Close), + High: fixedpoint.MustNewFromString(k.High), + Low: fixedpoint.MustNewFromString(k.Low), + Volume: fixedpoint.MustNewFromString(k.Volume), + QuoteVolume: fixedpoint.MustNewFromString(k.QuoteAssetVolume), + TakerBuyBaseAssetVolume: fixedpoint.MustNewFromString(k.TakerBuyBaseAssetVolume), + TakerBuyQuoteAssetVolume: fixedpoint.MustNewFromString(k.TakerBuyQuoteAssetVolume), + LastTradeID: 0, + NumberOfTrades: uint64(k.TradeNum), + Closed: true, + }) + } + + kLines = types.SortKLinesAscending(kLines) + return kLines, nil +} + +func (e *Exchange) queryMarginTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) (trades []types.Trade, err error) { var remoteTrades []*binance.TradeV3 + req := e.client.NewListMarginTradesService(). + IsIsolated(e.IsIsolatedMargin). + Symbol(symbol) - if e.IsMargin { - req := e.Client.NewListMarginTradesService(). - IsIsolated(e.IsIsolatedMargin). - Symbol(symbol) + if options.Limit > 0 { + req.Limit(int(options.Limit)) + } else { + req.Limit(1000) + } - if options.Limit > 0 { - req.Limit(int(options.Limit)) - } + // BINANCE uses inclusive last trade ID + if options.LastTradeID > 0 { + req.FromID(int64(options.LastTradeID)) + } - if options.StartTime != nil { - req.StartTime(options.StartTime.UnixNano() / int64(time.Millisecond)) - } - if options.EndTime != nil { - req.EndTime(options.EndTime.UnixNano() / int64(time.Millisecond)) - } - if options.LastTradeID > 0 { - req.FromID(options.LastTradeID) + if options.StartTime != nil && options.EndTime != nil { + if options.EndTime.Sub(*options.StartTime) < 24*time.Hour { + req.StartTime(options.StartTime.UnixMilli()) + req.EndTime(options.EndTime.UnixMilli()) + } else { + req.StartTime(options.StartTime.UnixMilli()) } + } else if options.StartTime != nil { + req.StartTime(options.StartTime.UnixMilli()) + } else if options.EndTime != nil { + req.EndTime(options.EndTime.UnixMilli()) + } - remoteTrades, err = req.Do(ctx) + remoteTrades, err = req.Do(ctx) + if err != nil { + return nil, err + } + for _, t := range remoteTrades { + localTrade, err := toGlobalTrade(*t, e.IsMargin) if err != nil { - return nil, err + log.WithError(err).Errorf("can not convert binance trade: %+v", t) + continue } + + trades = append(trades, *localTrade) + } + + trades = types.SortTradesAscending(trades) + return trades, nil +} + +func (e *Exchange) queryFuturesTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) (trades []types.Trade, err error) { + + var remoteTrades []*futures.AccountTrade + req := e.futuresClient.NewListAccountTradeService(). + Symbol(symbol) + if options.Limit > 0 { + req.Limit(int(options.Limit)) } else { - req := e.Client.NewListTradesService(). - Limit(1000). 
- Symbol(symbol) + req.Limit(1000) + } - if options.Limit > 0 { - req.Limit(int(options.Limit)) - } + // BINANCE uses inclusive last trade ID + if options.LastTradeID > 0 { + req.FromID(int64(options.LastTradeID)) + } - if options.StartTime != nil { - req.StartTime(options.StartTime.UnixNano() / int64(time.Millisecond)) - } - if options.EndTime != nil { - req.EndTime(options.EndTime.UnixNano() / int64(time.Millisecond)) - } - if options.LastTradeID > 0 { - req.FromID(options.LastTradeID) + // The parameter fromId cannot be sent with startTime or endTime. + // Mentioned in binance futures docs + if options.LastTradeID <= 0 { + if options.StartTime != nil && options.EndTime != nil { + if options.EndTime.Sub(*options.StartTime) < 24*time.Hour { + req.StartTime(options.StartTime.UnixMilli()) + req.EndTime(options.EndTime.UnixMilli()) + } else { + req.StartTime(options.StartTime.UnixMilli()) + } + } else if options.EndTime != nil { + req.EndTime(options.EndTime.UnixMilli()) } + } - remoteTrades, err = req.Do(ctx) + remoteTrades, err = req.Do(ctx) + if err != nil { + return nil, err + } + for _, t := range remoteTrades { + localTrade, err := toGlobalFuturesTrade(*t) if err != nil { - return nil, err + log.WithError(err).Errorf("can not convert binance futures trade: %+v", t) + continue + } + + trades = append(trades, *localTrade) + } + + trades = types.SortTradesAscending(trades) + return trades, nil +} + +func (e *Exchange) querySpotTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) (trades []types.Trade, err error) { + var remoteTrades []*binance.TradeV3 + req := e.client.NewListTradesService(). + Symbol(symbol) + + if options.Limit > 0 { + req.Limit(int(options.Limit)) + } else { + req.Limit(1000) + } + + // BINANCE uses inclusive last trade ID + if options.LastTradeID > 0 { + req.FromID(int64(options.LastTradeID)) + } + + if options.StartTime != nil && options.EndTime != nil { + if options.EndTime.Sub(*options.StartTime) < 24*time.Hour { + req.StartTime(options.StartTime.UnixMilli()) + req.EndTime(options.EndTime.UnixMilli()) + } else { + req.StartTime(options.StartTime.UnixMilli()) } + } else if options.StartTime != nil { + req.StartTime(options.StartTime.UnixMilli()) + } else if options.EndTime != nil { + req.EndTime(options.EndTime.UnixMilli()) } + remoteTrades, err = req.Do(ctx) + if err != nil { + return nil, err + } for _, t := range remoteTrades { - localTrade, err := ToGlobalTrade(*t, e.IsMargin) + localTrade, err := toGlobalTrade(*t, e.IsMargin) if err != nil { log.WithError(err).Errorf("can not convert binance trade: %+v", t) continue } - log.Infof("trade: %d %s % 4s price: % 13s volume: % 11s %6s % 5s %s", t.ID, t.Symbol, localTrade.Side, t.Price, t.Quantity, BuyerOrSellerLabel(t), MakerOrTakerLabel(t), localTrade.Time) trades = append(trades, *localTrade) } + trades = types.SortTradesAscending(trades) return trades, nil } -func (e *Exchange) BatchQueryKLines(ctx context.Context, symbol string, interval types.Interval, startTime, endTime time.Time) ([]types.KLine, error) { - var allKLines []types.KLine +func (e *Exchange) QueryTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) (trades []types.Trade, err error) { + if e.IsMargin { + return e.queryMarginTrades(ctx, symbol, options) + } else if e.IsFutures { + return e.queryFuturesTrades(ctx, symbol, options) + } else { + return e.querySpotTrades(ctx, symbol, options) + } +} - for startTime.Before(endTime) { - klines, err := e.QueryKLines(ctx, symbol, interval, 
types.KLineQueryOptions{ - StartTime: &startTime, - Limit: 1000, - }) +// DefaultFeeRates returns the Binance VIP 0 fee schedule +// See also https://www.binance.com/en/fee/schedule +func (e *Exchange) DefaultFeeRates() types.ExchangeFee { + return types.ExchangeFee{ + MakerFeeRate: fixedpoint.NewFromFloat(0.01 * 0.075), // 0.075% + TakerFeeRate: fixedpoint.NewFromFloat(0.01 * 0.075), // 0.075% + } +} +// QueryDepth query the order book depth of a symbol +func (e *Exchange) QueryDepth(ctx context.Context, symbol string) (snapshot types.SliceOrderBook, finalUpdateID int64, err error) { + var response *binance.DepthResponse + if e.IsFutures { + res, err := e.futuresClient.NewDepthService().Symbol(symbol).Do(ctx) if err != nil { - return nil, err + return snapshot, finalUpdateID, err + } + response = &binance.DepthResponse{ + LastUpdateID: res.LastUpdateID, + Bids: res.Bids, + Asks: res.Asks, + } + } else { + response, err = e.client.NewDepthService().Symbol(symbol).Do(ctx) + if err != nil { + return snapshot, finalUpdateID, err } + } - for _, kline := range klines { - if kline.EndTime.After(endTime) { - return allKLines, nil - } + snapshot.Symbol = symbol + finalUpdateID = response.LastUpdateID + for _, entry := range response.Bids { + // entry.Price, Quantity: entry.Quantity + price, err := fixedpoint.NewFromString(entry.Price) + if err != nil { + return snapshot, finalUpdateID, err + } + + quantity, err := fixedpoint.NewFromString(entry.Quantity) + if err != nil { + return snapshot, finalUpdateID, err + } + + snapshot.Bids = append(snapshot.Bids, types.PriceVolume{Price: price, Volume: quantity}) + } + + for _, entry := range response.Asks { + price, err := fixedpoint.NewFromString(entry.Price) + if err != nil { + return snapshot, finalUpdateID, err + } - allKLines = append(allKLines, kline) - startTime = kline.EndTime + quantity, err := fixedpoint.NewFromString(entry.Quantity) + if err != nil { + return snapshot, finalUpdateID, err } + + snapshot.Asks = append(snapshot.Asks, types.PriceVolume{Price: price, Volume: quantity}) + } + + return snapshot, finalUpdateID, nil +} + +// QueryPremiumIndex is only for futures +func (e *Exchange) QueryPremiumIndex(ctx context.Context, symbol string) (*types.PremiumIndex, error) { + // when symbol is set, only one index will be returned. + indexes, err := e.futuresClient.NewPremiumIndexService().Symbol(symbol).Do(ctx) + if err != nil { + return nil, err + } + + return convertPremiumIndex(indexes[0]) +} + +func (e *Exchange) QueryFundingRateHistory(ctx context.Context, symbol string) (*types.FundingRate, error) { + rates, err := e.futuresClient.NewFundingRateService(). + Symbol(symbol). + Limit(1). + Do(ctx) + if err != nil { + return nil, err + } + + if len(rates) == 0 { + return nil, errors.New("empty funding rate data") + } + + rate := rates[0] + fundingRate, err := fixedpoint.NewFromString(rate.FundingRate) + if err != nil { + return nil, err + } + + return &types.FundingRate{ + FundingRate: fundingRate, + FundingTime: time.Unix(0, rate.FundingTime*int64(time.Millisecond)), + Time: time.Unix(0, rate.Time*int64(time.Millisecond)), + }, nil +} + +func (e *Exchange) QueryPositionRisk(ctx context.Context, symbol string) (*types.PositionRisk, error) { + // when symbol is set, only one position risk will be returned. 
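	// Note: like QueryPremiumIndex above, the underlying endpoint returns a slice
	// and only the first entry is converted, so callers are expected to query a
	// single, non-empty symbol, e.g. (illustrative only):
	//
	//	risk, err := e.QueryPositionRisk(ctx, "BTCUSDT")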
+ risks, err := e.futuresClient.NewGetPositionRiskService().Symbol(symbol).Do(ctx) + if err != nil { + return nil, err + } + + return convertPositionRisk(risks[0]) +} + +func getLaunchDate() (time.Time, error) { + // binance launch date 12:00 July 14th, 2017 + loc, err := time.LoadLocation("Asia/Shanghai") + if err != nil { + return time.Time{}, err + } + + return time.Date(2017, time.July, 14, 0, 0, 0, 0, loc), nil +} + +// Margin tolerance ranges from 0.0 (liquidation) to 1.0 (safest level of margin). +func calculateMarginTolerance(marginLevel fixedpoint.Value) fixedpoint.Value { + if marginLevel.IsZero() { + // Although margin level shouldn't be zero, that would indicate a significant problem. + // In that case, margin tolerance should return 0.0 to also reflect that problem. + return fixedpoint.Zero } - return allKLines, nil + // Formula created by operations team for our binance code. Liquidation occurs at 1.1, + // so when marginLevel equals 1.1, the formula becomes 1.0 - 1.0, or zero. + // = 1.0 - (1.1 / marginLevel) + return fixedpoint.One.Sub(fixedpoint.NewFromFloat(1.1).Div(marginLevel)) } diff --git a/pkg/exchange/binance/exchange_test.go b/pkg/exchange/binance/exchange_test.go new file mode 100644 index 0000000000..bca405a820 --- /dev/null +++ b/pkg/exchange/binance/exchange_test.go @@ -0,0 +1,17 @@ +package binance + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_newClientOrderID(t *testing.T) { + cID := newSpotClientOrderID("") + assert.Len(t, cID, 32) + strings.HasPrefix(cID, "x-"+spotBrokerID) + + cID = newSpotClientOrderID("myid1") + assert.Equal(t, cID, "x-"+spotBrokerID+"myid1") +} diff --git a/pkg/exchange/binance/margin_history.go b/pkg/exchange/binance/margin_history.go new file mode 100644 index 0000000000..5408e04ba5 --- /dev/null +++ b/pkg/exchange/binance/margin_history.go @@ -0,0 +1,167 @@ +package binance + +import ( + "context" + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +func (e *Exchange) QueryLoanHistory(ctx context.Context, asset string, startTime, endTime *time.Time) ([]types.MarginLoan, error) { + req := e.client2.NewGetMarginLoanHistoryRequest() + req.Asset(asset) + req.Size(100) + + if startTime != nil { + req.StartTime(*startTime) + + // 6 months + if time.Since(*startTime) > time.Hour*24*30*6 { + req.Archived(true) + } + } + + if startTime != nil && endTime != nil { + duration := endTime.Sub(*startTime) + if duration > time.Hour*24*30 { + t := startTime.Add(time.Hour * 24 * 30) + endTime = &t + } + } + + if endTime != nil { + req.EndTime(*endTime) + } + + if e.MarginSettings.IsIsolatedMargin { + req.IsolatedSymbol(e.MarginSettings.IsolatedMarginSymbol) + } + + records, err := req.Do(ctx) + if err != nil { + return nil, err + } + + var loans []types.MarginLoan + for _, record := range records { + loans = append(loans, toGlobalLoan(record)) + } + + return loans, err +} + +func (e *Exchange) QueryRepayHistory(ctx context.Context, asset string, startTime, endTime *time.Time) ([]types.MarginRepay, error) { + req := e.client2.NewGetMarginRepayHistoryRequest() + req.Asset(asset) + req.Size(100) + + if startTime != nil { + req.StartTime(*startTime) + + // 6 months + if time.Since(*startTime) > time.Hour*24*30*6 { + req.Archived(true) + } + } + + if startTime != nil && endTime != nil { + duration := endTime.Sub(*startTime) + if duration > time.Hour*24*30 { + t := startTime.Add(time.Hour * 24 * 30) + endTime = &t + } + } + + if endTime != nil { + req.EndTime(*endTime) + } + + if e.MarginSettings.IsIsolatedMargin 
{ + req.IsolatedSymbol(e.MarginSettings.IsolatedMarginSymbol) + } + + records, err := req.Do(ctx) + + var repays []types.MarginRepay + for _, record := range records { + repays = append(repays, toGlobalRepay(record)) + } + + return repays, err +} + +func (e *Exchange) QueryLiquidationHistory(ctx context.Context, startTime, endTime *time.Time) ([]types.MarginLiquidation, error) { + req := e.client2.NewGetMarginLiquidationHistoryRequest() + req.Size(100) + + if startTime != nil { + req.StartTime(*startTime) + } + + if startTime != nil && endTime != nil { + duration := endTime.Sub(*startTime) + if duration > time.Hour*24*30 { + t := startTime.Add(time.Hour * 24 * 30) + endTime = &t + } + } + + if endTime != nil { + req.EndTime(*endTime) + } + + if e.MarginSettings.IsIsolatedMargin { + req.IsolatedSymbol(e.MarginSettings.IsolatedMarginSymbol) + } + + records, err := req.Do(ctx) + var liquidations []types.MarginLiquidation + for _, record := range records { + liquidations = append(liquidations, toGlobalLiquidation(record)) + } + + return liquidations, err +} + +func (e *Exchange) QueryInterestHistory(ctx context.Context, asset string, startTime, endTime *time.Time) ([]types.MarginInterest, error) { + req := e.client2.NewGetMarginInterestHistoryRequest() + req.Asset(asset) + req.Size(100) + + if startTime != nil { + req.StartTime(*startTime) + + // 6 months + if time.Since(*startTime) > time.Hour*24*30*6 { + req.Archived(true) + } + } + + if startTime != nil && endTime != nil { + duration := endTime.Sub(*startTime) + if duration > time.Hour*24*30 { + t := startTime.Add(time.Hour * 24 * 30) + endTime = &t + } + } + + if endTime != nil { + req.EndTime(*endTime) + } + + if e.MarginSettings.IsIsolatedMargin { + req.IsolatedSymbol(e.MarginSettings.IsolatedMarginSymbol) + } + + records, err := req.Do(ctx) + if err != nil { + return nil, err + } + + var interests []types.MarginInterest + for _, record := range records { + interests = append(interests, toGlobalInterest(record)) + } + + return interests, err +} diff --git a/pkg/exchange/binance/parse.go b/pkg/exchange/binance/parse.go index 65dfac7642..49a6874394 100644 --- a/pkg/exchange/binance/parse.go +++ b/pkg/exchange/binance/parse.go @@ -6,12 +6,13 @@ import ( "fmt" "time" + "github.com/adshao/go-binance/v2/futures" + "github.com/adshao/go-binance/v2" "github.com/valyala/fastjson" "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" - "github.com/c9s/bbgo/pkg/util" ) /* @@ -49,28 +50,38 @@ executionReport "M": false, // Ignore "O": 1499405658657, // Order creation time "Z": "0.00000000", // Cumulative quote asset transacted quantity - "Y": "0.00000000", // Last quote asset transacted quantity (i.e. lastPrice * lastQty) - "Q": "0.00000000" // Quote Order Qty + "Y": "0.00000000", // Last quote asset transacted quantity (i.e. 
lastPrice * lastQty) + "Q": "0.00000000" // Quote Order Quantity } */ type ExecutionReportEvent struct { EventBase - Symbol string `json:"s"` - ClientOrderID string `json:"c"` - Side string `json:"S"` - OrderType string `json:"o"` - TimeInForce string `json:"f"` + Symbol string `json:"s"` + Side string `json:"S"` + + ClientOrderID string `json:"c"` + OriginalClientOrderID string `json:"C"` + + OrderType string `json:"o"` + OrderCreationTime int64 `json:"O"` + + TimeInForce string `json:"f"` + IcebergQuantity fixedpoint.Value `json:"F"` - OrderQuantity string `json:"q"` - OrderPrice string `json:"p"` - StopPrice string `json:"P"` + OrderQuantity fixedpoint.Value `json:"q"` + QuoteOrderQuantity fixedpoint.Value `json:"Q"` + + OrderPrice fixedpoint.Value `json:"p"` + StopPrice fixedpoint.Value `json:"P"` IsOnBook bool `json:"w"` - IsMaker bool `json:"m"` - CommissionAmount string `json:"n"` - CommissionAsset string `json:"N"` + IsMaker bool `json:"m"` + Ignore bool `json:"M"` + + CommissionAmount fixedpoint.Value `json:"n"` + CommissionAsset string `json:"N"` CurrentExecutionType string `json:"x"` CurrentOrderStatus string `json:"X"` @@ -81,19 +92,20 @@ type ExecutionReportEvent struct { TradeID int64 `json:"t"` TransactionTime int64 `json:"T"` - LastExecutedQuantity string `json:"l"` - CumulativeFilledQuantity string `json:"z"` - LastExecutedPrice string `json:"L"` - LastQuoteAssetTransactedQuantity string `json:"Y"` + LastExecutedQuantity fixedpoint.Value `json:"l"` + LastExecutedPrice fixedpoint.Value `json:"L"` + + CumulativeFilledQuantity fixedpoint.Value `json:"z"` + CumulativeQuoteAssetTransactedQuantity fixedpoint.Value `json:"Z"` - OrderCreationTime int64 `json:"O"` + LastQuoteAssetTransactedQuantity fixedpoint.Value `json:"Y"` } func (e *ExecutionReportEvent) Order() (*types.Order, error) { - switch e.CurrentExecutionType { case "NEW", "CANCELED", "REJECTED", "EXPIRED": case "REPLACED": + case "TRADE": // For Order FILLED status. And the order has been completed. 
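	// Any other execution type is rejected below, so an unrecognized report does
	// not produce a bogus order update.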
default: return nil, errors.New("execution report type is not for order") } @@ -101,18 +113,25 @@ func (e *ExecutionReportEvent) Order() (*types.Order, error) { orderCreationTime := time.Unix(0, e.OrderCreationTime*int64(time.Millisecond)) return &types.Order{ SubmitOrder: types.SubmitOrder{ - Symbol: e.Symbol, ClientOrderID: e.ClientOrderID, + Symbol: e.Symbol, Side: toGlobalSideType(binance.SideType(e.Side)), Type: toGlobalOrderType(binance.OrderType(e.OrderType)), - Quantity: util.MustParseFloat(e.OrderQuantity), - Price: util.MustParseFloat(e.OrderPrice), - TimeInForce: e.TimeInForce, + Quantity: e.OrderQuantity, + Price: e.OrderPrice, + StopPrice: e.StopPrice, + TimeInForce: types.TimeInForce(e.TimeInForce), + IsFutures: false, + ReduceOnly: false, + ClosePosition: false, }, + Exchange: types.ExchangeBinance, + IsWorking: e.IsOnBook, OrderID: uint64(e.OrderID), Status: toGlobalOrderStatus(binance.OrderStatusType(e.CurrentOrderStatus)), - ExecutedQuantity: util.MustParseFloat(e.CumulativeFilledQuantity), - CreationTime: orderCreationTime, + ExecutedQuantity: e.CumulativeFilledQuantity, + CreationTime: types.Time(orderCreationTime), + UpdateTime: types.Time(orderCreationTime), }, nil } @@ -123,17 +142,18 @@ func (e *ExecutionReportEvent) Trade() (*types.Trade, error) { tt := time.Unix(0, e.TransactionTime*int64(time.Millisecond)) return &types.Trade{ - ID: e.TradeID, + ID: uint64(e.TradeID), + Exchange: types.ExchangeBinance, Symbol: e.Symbol, OrderID: uint64(e.OrderID), Side: toGlobalSideType(binance.SideType(e.Side)), - Price: util.MustParseFloat(e.LastExecutedPrice), - Quantity: util.MustParseFloat(e.LastExecutedQuantity), - QuoteQuantity: util.MustParseFloat(e.LastQuoteAssetTransactedQuantity), + Price: e.LastExecutedPrice, + Quantity: e.LastExecutedQuantity, + QuoteQuantity: e.LastQuoteAssetTransactedQuantity, IsBuyer: e.Side == "BUY", IsMaker: e.IsMaker, - Time: tt, - Fee: util.MustParseFloat(e.CommissionAmount), + Time: types.Time(tt), + Fee: e.CommissionAmount, FeeCurrency: e.CommissionAsset, }, nil } @@ -172,7 +192,7 @@ outboundAccountInfo "W": true, // Can withdraw? "D": true, // Can deposit? 
"u": 1499405658848, // Time of last account update - "B": [ // Balances array + "B": [ // AccountBalances array { "a": "LTC", // Asset "f": "17366.18538083", // Free amount @@ -206,9 +226,9 @@ outboundAccountInfo */ type Balance struct { - Asset string `json:"a"` - Free string `json:"f"` - Locked string `json:"l"` + Asset string `json:"a"` + Free fixedpoint.Value `json:"f"` + Locked fixedpoint.Value `json:"l"` } type OutboundAccountPositionEvent struct { @@ -241,19 +261,34 @@ type ResultEvent struct { ID int `json:"id"` } -func ParseEvent(message string) (interface{}, error) { - val, err := fastjson.Parse(message) +func parseWebSocketEvent(message []byte) (interface{}, error) { + val, err := fastjson.ParseBytes(message) + if err != nil { return nil, err } + // res, err := json.MarshalIndent(message, "", " ") + // if err != nil { + // log.Fatal(err) + // } + // str := strings.ReplaceAll(string(res), "\\", "") + // fmt.Println(str) eventType := string(val.GetStringBytes("e")) + if eventType == "" && IsBookTicker(val) { + eventType = "bookTicker" + } switch eventType { case "kline": var event KLineEvent err := json.Unmarshal([]byte(message), &event) return &event, err + case "bookTicker": + var event BookTickerEvent + err := json.Unmarshal([]byte(message), &event) + event.Event = eventType + return &event, err case "outboundAccountPosition": var event OutboundAccountPositionEvent @@ -278,6 +313,39 @@ func ParseEvent(message string) (interface{}, error) { case "depthUpdate": return parseDepthEvent(val) + case "markPriceUpdate": + var event MarkPriceUpdateEvent + err := json.Unmarshal([]byte(message), &event) + return &event, err + + // Binance futures data -------------- + case "continuousKline": + var event ContinuousKLineEvent + err := json.Unmarshal([]byte(message), &event) + return &event, err + + case "ORDER_TRADE_UPDATE": + var event OrderTradeUpdateEvent + err := json.Unmarshal([]byte(message), &event) + return &event, err + + // Event: Balance and Position Update + case "ACCOUNT_UPDATE": + var event AccountUpdateEvent + err := json.Unmarshal([]byte(message), &event) + return &event, err + + // Event: Order Update + case "ACCOUNT_CONFIG_UPDATE": + var event AccountConfigUpdateEvent + err := json.Unmarshal([]byte(message), &event) + return &event, err + + case "trade": + var event MarketTradeEvent + err := json.Unmarshal([]byte(message), &event) + return &event, err + default: id := val.GetInt("id") if id > 0 { @@ -288,9 +356,17 @@ func ParseEvent(message string) (interface{}, error) { return nil, fmt.Errorf("unsupported message: %s", message) } +// IsBookTicker document ref :https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams +// use key recognition because there's no identify in the content. 
+func IsBookTicker(val *fastjson.Value) bool { + return !val.Exists("e") && val.Exists("u") && + val.Exists("s") && val.Exists("b") && + val.Exists("B") && val.Exists("a") && val.Exists("A") +} + type DepthEntry struct { - PriceLevel string - Quantity string + PriceLevel fixedpoint.Value + Quantity fixedpoint.Value } type DepthEvent struct { @@ -300,55 +376,41 @@ type DepthEvent struct { FirstUpdateID int64 `json:"U"` FinalUpdateID int64 `json:"u"` - Bids []DepthEntry - Asks []DepthEntry + Bids types.PriceVolumeSlice `json:"b"` + Asks types.PriceVolumeSlice `json:"a"` } -func (e *DepthEvent) OrderBook() (book types.OrderBook, err error) { - book.Symbol = e.Symbol - - for _, entry := range e.Bids { - quantity, err := fixedpoint.NewFromString(entry.Quantity) - if err != nil { - continue - } - - price, err := fixedpoint.NewFromString(entry.PriceLevel) - if err != nil { - continue - } - - pv := types.PriceVolume{ - Price: price, - Volume: quantity, - } +func (e *DepthEvent) String() (o string) { + o += fmt.Sprintf("Depth %s bid/ask = ", e.Symbol) - book.Bids = book.Bids.Upsert(pv, true) + if len(e.Bids) == 0 { + o += "empty" + } else { + o += e.Bids[0].Price.String() } - for _, entry := range e.Asks { - quantity, err := fixedpoint.NewFromString(entry.Quantity) - if err != nil { - continue - } + o += "/" - price, err := fixedpoint.NewFromString(entry.PriceLevel) - if err != nil { - continue - } + if len(e.Asks) == 0 { + o += "empty" + } else { + o += e.Asks[0].Price.String() + } - pv := types.PriceVolume{ - Price: price, - Volume: quantity, - } + o += fmt.Sprintf(" %d ~ %d", e.FirstUpdateID, e.FinalUpdateID) + return o +} - book.Asks = book.Asks.Upsert(pv, false) - } +func (e *DepthEvent) OrderBook() (book types.SliceOrderBook, err error) { + book.Symbol = e.Symbol - return + // already in descending order + book.Bids = e.Bids + book.Asks = e.Asks + return book, err } -func parseDepthEntry(val *fastjson.Value) (*DepthEntry, error) { +func parseDepthEntry(val *fastjson.Value) (*types.PriceVolume, error) { arr, err := val.Array() if err != nil { return nil, err @@ -358,9 +420,19 @@ func parseDepthEntry(val *fastjson.Value) (*DepthEntry, error) { return nil, errors.New("incorrect depth entry element length") } - return &DepthEntry{ - PriceLevel: string(arr[0].GetStringBytes()), - Quantity: string(arr[1].GetStringBytes()), + price, err := fixedpoint.NewFromString(string(arr[0].GetStringBytes())) + if err != nil { + return nil, err + } + + quantity, err := fixedpoint.NewFromString(string(arr[1].GetStringBytes())) + if err != nil { + return nil, err + } + + return &types.PriceVolume{ + Price: price, + Volume: quantity, }, nil } @@ -399,6 +471,73 @@ func parseDepthEvent(val *fastjson.Value) (*DepthEvent, error) { return depth, err } +type MarketTradeEvent struct { + EventBase + Symbol string `json:"s"` + Quantity fixedpoint.Value `json:"q"` + Price fixedpoint.Value `json:"p"` + + BuyerOrderId int64 `json:"b"` + SellerOrderId int64 `json:"a"` + + OrderTradeTime int64 `json:"T"` + TradeId int64 `json:"t"` + + IsMaker bool `json:"m"` + Dummy bool `json:"M"` +} + +/* + +market trade + +{ + "e": "trade", // Event type + "E": 123456789, // Event time + "s": "BNBBTC", // Symbol + "t": 12345, // Trade ID + "p": "0.001", // Price + "q": "100", // Quantity + "b": 88, // Buyer order ID + "a": 50, // Seller order ID + "T": 123456785, // Trade time + "m": true, // Is the buyer the market maker? 
+ "M": true // Ignore +} + +*/ + +func (e *MarketTradeEvent) Trade() types.Trade { + tt := time.Unix(0, e.OrderTradeTime*int64(time.Millisecond)) + var orderId int64 + var side types.SideType + var isBuyer bool + if e.IsMaker { + orderId = e.SellerOrderId // seller is taker + side = types.SideTypeSell + isBuyer = false + } else { + orderId = e.BuyerOrderId // buyer is taker + side = types.SideTypeBuy + isBuyer = true + } + return types.Trade{ + ID: uint64(e.TradeId), + Exchange: types.ExchangeBinance, + Symbol: e.Symbol, + OrderID: uint64(orderId), + Side: side, + Price: e.Price, + Quantity: e.Quantity, + QuoteQuantity: e.Quantity, + IsBuyer: isBuyer, + IsMaker: e.IsMaker, + Time: types.Time(tt), + Fee: fixedpoint.Zero, + FeeCurrency: "", + } +} + type KLine struct { StartTime int64 `json:"t"` EndTime int64 `json:"T"` @@ -406,44 +545,22 @@ type KLine struct { Symbol string `json:"s"` Interval string `json:"i"` - Open string `json:"o"` - Close string `json:"c"` - High string `json:"h"` + Open fixedpoint.Value `json:"o"` + Close fixedpoint.Value `json:"c"` + High fixedpoint.Value `json:"h"` + Low fixedpoint.Value `json:"l"` + + Volume fixedpoint.Value `json:"v"` // base asset volume (like 10 BTC) + QuoteVolume fixedpoint.Value `json:"q"` // quote asset volume - Low string `json:"l"` - Volume string `json:"V"` // taker buy base asset volume (like 10 BTC) - QuoteVolume string `json:"Q"` // taker buy quote asset volume (like 1000USDT) + TakerBuyBaseAssetVolume fixedpoint.Value `json:"V"` // taker buy base asset volume (like 10 BTC) + TakerBuyQuoteAssetVolume fixedpoint.Value `json:"Q"` // taker buy quote asset volume (like 1000USDT) LastTradeID int `json:"L"` NumberOfTrades int64 `json:"n"` Closed bool `json:"x"` } -type KLineEvent struct { - EventBase - Symbol string `json:"s"` - KLine KLine `json:"k,omitempty"` -} - -func (k *KLine) KLine() types.KLine { - return types.KLine{ - Exchange: "binance", - Symbol: k.Symbol, - Interval: types.Interval(k.Interval), - StartTime: time.Unix(0, k.StartTime*int64(time.Millisecond)), - EndTime: time.Unix(0, k.EndTime*int64(time.Millisecond)), - Open: util.MustParseFloat(k.Open), - Close: util.MustParseFloat(k.Close), - High: util.MustParseFloat(k.High), - Low: util.MustParseFloat(k.Low), - Volume: util.MustParseFloat(k.Volume), - QuoteVolume: util.MustParseFloat(k.QuoteVolume), - LastTradeID: uint64(k.LastTradeID), - NumberOfTrades: uint64(k.NumberOfTrades), - Closed: k.Closed, - } -} - /* kline @@ -474,7 +591,285 @@ kline } */ + +type KLineEvent struct { + EventBase + Symbol string `json:"s"` + KLine KLine `json:"k,omitempty"` +} + +func (k *KLine) KLine() types.KLine { + return types.KLine{ + Exchange: types.ExchangeBinance, + Symbol: k.Symbol, + Interval: types.Interval(k.Interval), + StartTime: types.NewTimeFromUnix(0, k.StartTime*int64(time.Millisecond)), + EndTime: types.NewTimeFromUnix(0, k.EndTime*int64(time.Millisecond)), + Open: k.Open, + Close: k.Close, + High: k.High, + Low: k.Low, + Volume: k.Volume, + QuoteVolume: k.QuoteVolume, + TakerBuyBaseAssetVolume: k.TakerBuyBaseAssetVolume, + TakerBuyQuoteAssetVolume: k.TakerBuyQuoteAssetVolume, + LastTradeID: uint64(k.LastTradeID), + NumberOfTrades: uint64(k.NumberOfTrades), + Closed: k.Closed, + } +} + +type MarkPriceUpdateEvent struct { + EventBase + + Symbol string `json:"s"` + + MarkPrice fixedpoint.Value `json:"p"` + IndexPrice fixedpoint.Value `json:"i"` + EstimatedPrice fixedpoint.Value `json:"P"` + + FundingRate fixedpoint.Value `json:"r"` + NextFundingTime int64 `json:"T"` +} + +/* +{ + "e": 
"markPriceUpdate", // Event type + "E": 1562305380000, // Event time + "s": "BTCUSDT", // Symbol + "p": "11794.15000000", // Mark price + "i": "11784.62659091", // Index price + "P": "11784.25641265", // Estimated Settle Price, only useful in the last hour before the settlement starts + "r": "0.00038167", // Funding rate + "T": 1562306400000 // Next funding time +} +*/ + +type ContinuousKLineEvent struct { + EventBase + Symbol string `json:"ps"` + ct string `json:"ct"` + KLine KLine `json:"k,omitempty"` +} + +/* +{ + "e":"continuous_kline", // Event type + "E":1607443058651, // Event time + "ps":"BTCUSDT", // Pair + "ct":"PERPETUAL" // Contract type + "k":{ + "t":1607443020000, // Kline start time + "T":1607443079999, // Kline close time + "i":"1m", // Interval + "f":116467658886, // First trade ID + "L":116468012423, // Last trade ID + "o":"18787.00", // Open price + "c":"18804.04", // Close price + "h":"18804.04", // High price + "l":"18786.54", // Low price + "v":"197.664", // volume + "n": 543, // Number of trades + "x":false, // Is this kline closed? + "q":"3715253.19494", // Quote asset volume + "V":"184.769", // Taker buy volume + "Q":"3472925.84746", //Taker buy quote asset volume + "B":"0" // Ignore + } +} +*/ + +// Similar to the ExecutionReportEvent's fields. But with totally different json key. +// e.g., Stop price. So that, we can not merge them. +type OrderTrade struct { + Symbol string `json:"s"` + ClientOrderID string `json:"c"` + Side string `json:"S"` + OrderType string `json:"o"` + TimeInForce string `json:"f"` + OriginalQuantity fixedpoint.Value `json:"q"` + OriginalPrice fixedpoint.Value `json:"p"` + + AveragePrice fixedpoint.Value `json:"ap"` + StopPrice fixedpoint.Value `json:"sp"` + CurrentExecutionType string `json:"x"` + CurrentOrderStatus string `json:"X"` + + OrderId int64 `json:"i"` + OrderLastFilledQuantity fixedpoint.Value `json:"l"` + OrderFilledAccumulatedQuantity fixedpoint.Value `json:"z"` + LastFilledPrice fixedpoint.Value `json:"L"` + + CommissionAmount fixedpoint.Value `json:"n"` + CommissionAsset string `json:"N"` + + OrderTradeTime int64 `json:"T"` + TradeId int64 `json:"t"` + + BidsNotional string `json:"b"` + AskNotional string `json:"a"` + + IsMaker bool `json:"m"` + IsReduceOnly bool ` json:"r"` + + StopPriceWorkingType string `json:"wt"` + OriginalOrderType string `json:"ot"` + PositionSide string `json:"ps"` + RealizedProfit string `json:"rp"` +} + +type OrderTradeUpdateEvent struct { + EventBase + Transaction int64 `json:"T"` + OrderTrade OrderTrade `json:"o"` +} + +// { + +// "e":"ORDER_TRADE_UPDATE", // Event Type +// "E":1568879465651, // Event Time +// "T":1568879465650, // Transaction Time +// "o":{ +// "s":"BTCUSDT", // Symbol +// "c":"TEST", // Client Order Id +// // special client order id: +// // starts with "autoclose-": liquidation order +// // "adl_autoclose": ADL auto close order +// "S":"SELL", // Side +// "o":"TRAILING_STOP_MARKET", // Order Type +// "f":"GTC", // Time in Force +// "q":"0.001", // Original Quantity +// "p":"0", // Original Price +// "ap":"0", // Average Price +// "sp":"7103.04", // Stop Price. 
Please ignore with TRAILING_STOP_MARKET order +// "x":"NEW", // Execution Type +// "X":"NEW", // Order Status +// "i":8886774, // Order Id +// "l":"0", // Order Last Filled Quantity +// "z":"0", // Order Filled Accumulated Quantity +// "L":"0", // Last Filled Price +// "N":"USDT", // Commission Asset, will not push if no commission +// "n":"0", // Commission, will not push if no commission +// "T":1568879465651, // Order Trade Time +// "t":0, // Trade Id +// "b":"0", // Bids Notional +// "a":"9.91", // Ask Notional +// "m":false, // Is this trade the maker side? +// "R":false, // Is this reduce only +// "wt":"CONTRACT_PRICE", // Stop Price Working Type +// "ot":"TRAILING_STOP_MARKET", // Original Order Type +// "ps":"LONG", // Position Side +// "cp":false, // If Close-All, pushed with conditional order +// "AP":"7476.89", // Activation Price, only puhed with TRAILING_STOP_MARKET order +// "cr":"5.0", // Callback Rate, only puhed with TRAILING_STOP_MARKET order +// "rp":"0" // Realized Profit of the trade +// } + +// } + +func (e *OrderTradeUpdateEvent) OrderFutures() (*types.Order, error) { + + switch e.OrderTrade.CurrentExecutionType { + case "NEW", "CANCELED", "EXPIRED": + case "CALCULATED - Liquidation Execution": + case "TRADE": // For Order FILLED status. And the order has been completed. + default: + return nil, errors.New("execution report type is not for futures order") + } + + orderCreationTime := time.Unix(0, e.OrderTrade.OrderTradeTime*int64(time.Millisecond)) + return &types.Order{ + Exchange: types.ExchangeBinance, + SubmitOrder: types.SubmitOrder{ + Symbol: e.OrderTrade.Symbol, + ClientOrderID: e.OrderTrade.ClientOrderID, + Side: toGlobalFuturesSideType(futures.SideType(e.OrderTrade.Side)), + Type: toGlobalFuturesOrderType(futures.OrderType(e.OrderTrade.OrderType)), + Quantity: e.OrderTrade.OriginalQuantity, + Price: e.OrderTrade.OriginalPrice, + TimeInForce: types.TimeInForce(e.OrderTrade.TimeInForce), + }, + OrderID: uint64(e.OrderTrade.OrderId), + Status: toGlobalFuturesOrderStatus(futures.OrderStatusType(e.OrderTrade.CurrentOrderStatus)), + ExecutedQuantity: e.OrderTrade.OrderFilledAccumulatedQuantity, + CreationTime: types.Time(orderCreationTime), + }, nil +} + +func (e *OrderTradeUpdateEvent) TradeFutures() (*types.Trade, error) { + if e.OrderTrade.CurrentExecutionType != "TRADE" { + return nil, errors.New("execution report is not a futures trade") + } + + tt := time.Unix(0, e.OrderTrade.OrderTradeTime*int64(time.Millisecond)) + return &types.Trade{ + ID: uint64(e.OrderTrade.TradeId), + Exchange: types.ExchangeBinance, + Symbol: e.OrderTrade.Symbol, + OrderID: uint64(e.OrderTrade.OrderId), + Side: toGlobalSideType(binance.SideType(e.OrderTrade.Side)), + Price: e.OrderTrade.LastFilledPrice, + Quantity: e.OrderTrade.OrderLastFilledQuantity, + QuoteQuantity: e.OrderTrade.LastFilledPrice.Mul(e.OrderTrade.OrderLastFilledQuantity), + IsBuyer: e.OrderTrade.Side == "BUY", + IsMaker: e.OrderTrade.IsMaker, + Time: types.Time(tt), + Fee: e.OrderTrade.CommissionAmount, + FeeCurrency: e.OrderTrade.CommissionAsset, + }, nil +} + +type AccountUpdate struct { + EventReasonType string `json:"m"` + Balances []*futures.Balance `json:"B,omitempty"` + Positions []*futures.AccountPosition `json:"P,omitempty"` +} + +type AccountUpdateEvent struct { + EventBase + Transaction int64 `json:"T"` + + AccountUpdate AccountUpdate `json:"a"` +} + +type AccountConfig struct { + Symbol string `json:"s"` + Leverage fixedpoint.Value `json:"l"` +} + +type AccountConfigUpdateEvent struct { + EventBase + 
Transaction int64 `json:"T"` + + AccountConfig AccountConfig `json:"ac"` +} + type EventBase struct { Event string `json:"e"` // event Time int64 `json:"E"` } + +type BookTickerEvent struct { + EventBase + Symbol string `json:"s"` + Buy fixedpoint.Value `json:"b"` + BuySize fixedpoint.Value `json:"B"` + Sell fixedpoint.Value `json:"a"` + SellSize fixedpoint.Value `json:"A"` + // "u":400900217, // order book updateId + // "s":"BNBUSDT", // symbol + // "b":"25.35190000", // best bid price + // "B":"31.21000000", // best bid qty + // "a":"25.36520000", // best ask price + // "A":"40.66000000" // best ask qty +} + +func (k *BookTickerEvent) BookTicker() types.BookTicker { + return types.BookTicker{ + Symbol: k.Symbol, + Buy: k.Buy, + BuySize: k.BuySize, + Sell: k.Sell, + SellSize: k.SellSize, + } +} diff --git a/pkg/exchange/binance/parse_test.go b/pkg/exchange/binance/parse_test.go index a6c1dd5b9f..0d83664ce1 100644 --- a/pkg/exchange/binance/parse_test.go +++ b/pkg/exchange/binance/parse_test.go @@ -5,6 +5,8 @@ import ( "testing" "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" ) var jsCommentTrimmer = regexp.MustCompile("(?m)//.*$") @@ -127,7 +129,7 @@ func TestMarginResponseParsing(t *testing.T) { for _, testcase := range testcases { payload := testcase.input payload = jsCommentTrimmer.ReplaceAllLiteralString(payload, "") - event, err := ParseEvent(payload) + event, err := parseWebSocketEvent([]byte(payload)) assert.NoError(t, err) assert.NotNil(t, event) } @@ -144,7 +146,7 @@ func TestParseOrderUpdate(t *testing.T) { "f": "GTC", // Time in force "q": "1.00000000", // Order quantity "p": "0.10264410", // Order price - "P": "0.00000000", // Stop price + "P": "0.222", // Stop price "F": "0.00000000", // Iceberg quantity "g": -1, // OrderListId "C": null, // Original client order ID; This is the ID of the order being canceled @@ -154,7 +156,7 @@ func TestParseOrderUpdate(t *testing.T) { "i": 4293153, // Order ID "l": "0.00000000", // Last executed quantity "z": "0.00000000", // Cumulative filled quantity - "L": "0.00000000", // Last executed price + "L": "0.00000001", // Last executed price "n": "0", // Commission amount "N": null, // Commission asset "T": 1499405658657, // Transaction time @@ -162,16 +164,16 @@ func TestParseOrderUpdate(t *testing.T) { "I": 8641984, // Ignore "w": true, // Is the order on the book? "m": false, // Is this trade the maker side? - "M": false, // Ignore + "M": true, // Ignore "O": 1499405658657, // Order creation time - "Z": "0.00000000", // Cumulative quote asset transacted quantity - "Y": "0.00000000", // Last quote asset transacted quantity (i.e. lastPrice * lastQty) - "Q": "0.00000000" // Quote Order Qty + "Z": "0.1", // Cumulative quote asset transacted quantity + "Y": "0.00000000", // Last quote asset transacted quantity (i.e. 
lastPrice * lastQty) + "Q": "2.0" // Quote Order Quantity }` payload = jsCommentTrimmer.ReplaceAllLiteralString(payload, "") - event, err := ParseEvent(payload) + event, err := parseWebSocketEvent([]byte(payload)) assert.NoError(t, err) assert.NotNil(t, event) @@ -179,7 +181,232 @@ func TestParseOrderUpdate(t *testing.T) { assert.True(t, ok) assert.NotNil(t, executionReport) + assert.Equal(t, executionReport.Symbol, "ETHBTC") + assert.Equal(t, executionReport.Side, "BUY") + assert.Equal(t, executionReport.ClientOrderID, "mUvoqJxFIILMdfAW5iGSOW") + assert.Equal(t, executionReport.OriginalClientOrderID, "") + assert.Equal(t, executionReport.OrderType, "LIMIT") + assert.Equal(t, executionReport.OrderCreationTime, int64(1499405658657)) + assert.Equal(t, executionReport.TimeInForce, "GTC") + assert.Equal(t, executionReport.IcebergQuantity, fixedpoint.MustNewFromString("0.00000000")) + assert.Equal(t, executionReport.OrderQuantity, fixedpoint.MustNewFromString("1.00000000")) + assert.Equal(t, executionReport.QuoteOrderQuantity, fixedpoint.MustNewFromString("2.0")) + assert.Equal(t, executionReport.OrderPrice, fixedpoint.MustNewFromString("0.10264410")) + assert.Equal(t, executionReport.StopPrice, fixedpoint.MustNewFromString("0.222")) + assert.Equal(t, executionReport.IsOnBook, true) + assert.Equal(t, executionReport.IsMaker, false) + assert.Equal(t, executionReport.Ignore, true) + assert.Equal(t, executionReport.CommissionAmount, fixedpoint.MustNewFromString("0")) + assert.Equal(t, executionReport.CommissionAsset, "") + assert.Equal(t, executionReport.CurrentExecutionType, "NEW") + assert.Equal(t, executionReport.CurrentOrderStatus, "NEW") + assert.Equal(t, executionReport.OrderID, int64(4293153)) + assert.Equal(t, executionReport.Ignored, int64(8641984)) + assert.Equal(t, executionReport.TradeID, int64(-1)) + assert.Equal(t, executionReport.TransactionTime, int64(1499405658657)) + assert.Equal(t, executionReport.LastExecutedQuantity, fixedpoint.MustNewFromString("0.00000000")) + assert.Equal(t, executionReport.LastExecutedPrice, fixedpoint.MustNewFromString("0.00000001")) + assert.Equal(t, executionReport.CumulativeFilledQuantity, fixedpoint.MustNewFromString("0.00000000")) + assert.Equal(t, executionReport.CumulativeQuoteAssetTransactedQuantity, fixedpoint.MustNewFromString("0.1")) + assert.Equal(t, executionReport.LastQuoteAssetTransactedQuantity, fixedpoint.MustNewFromString("0.00000000")) + orderUpdate, err := executionReport.Order() assert.NoError(t, err) assert.NotNil(t, orderUpdate) } + +func TestFuturesResponseParsing(t *testing.T) { + type testcase struct { + input string + } + + var testcases = []testcase{ + { + input: `{ + "e": "ORDER_TRADE_UPDATE", + "T": 1639933384755, + "E": 1639933384763, + "o": { + "s": "BTCUSDT", + "c": "x-NSUYEBKMe60cf610-f5c7-49a4-9c1", + "S": "SELL", + "o": "MARKET", + "f": "GTC", + "q": "0.001", + "p": "0", + "ap": "0", + "sp": "0", + "x": "NEW", + "X": "NEW", + "i": 38541728873, + "l": "0", + "z": "0", + "L": "0", + "T": 1639933384755, + "t": 0, + "b": "0", + "a": "0", + "m": false, + "R": false, + "wt": "CONTRACT_PRICE", + "ot": "MARKET", + "ps": "BOTH", + "cp": false, + "rp": "0", + "pP": false, + "si": 0, + "ss": 0 + } + }`, + }, + { + input: `{ + "e": "ACCOUNT_UPDATE", + "T": 1639933384755, + "E": 1639933384763, + "a": { + "B": [ + { + "a": "USDT", + "wb": "86.94966888", + "cw": "86.94966888", + "bc": "0" + } + ], + "P": [ + { + "s": "BTCUSDT", + "pa": "-0.001", + "ep": "47202.40000", + "cr": "7.78107001", + "up": "-0.00233523", + "mt": "cross", + 
"iw": "0", + "ps": "BOTH", + "ma": "USDT" + } + ], + "m": "ORDER" + } + }`, + }, + { + input: `{ + "e": "ORDER_TRADE_UPDATE", + "T": 1639933384755, + "E": 1639933384763, + "o": { + "s": "BTCUSDT", + "c": "x-NSUYEBKMe60cf610-f5c7-49a4-9c1", + "S": "SELL", + "o": "MARKET", + "f": "GTC", + "q": "0.001", + "p": "0", + "ap": "47202.40000", + "sp": "0", + "x": "TRADE", + "X": "FILLED", + "i": 38541728873, + "l": "0.001", + "z": "0.001", + "L": "47202.40", + "n": "0.01888095", + "N": "USDT", + "T": 1639933384755, + "t": 1741505949, + "b": "0", + "a": "0", + "m": false, + "R": false, + "wt": "CONTRACT_PRICE", + "ot": "MARKET", + "ps": "BOTH", + "cp": false, + "rp": "0", + "pP": false, + "si": 0, + "ss": 0 + } + }`, + }, + } + + for _, testcase := range testcases { + payload := testcase.input + payload = jsCommentTrimmer.ReplaceAllLiteralString(payload, "") + event, err := parseWebSocketEvent([]byte(payload)) + assert.NoError(t, err) + assert.NotNil(t, event) + } +} + +func TestParseOrderFuturesUpdate(t *testing.T) { + payload := `{ + "e": "ORDER_TRADE_UPDATE", + "T": 1639933384755, + "E": 1639933384763, + "o": { + "s": "BTCUSDT", + "c": "x-NSUYEBKMe60cf610-f5c7-49a4-9c1", + "S": "SELL", + "o": "MARKET", + "f": "GTC", + "q": "0.001", + "p": "0", + "ap": "47202.40000", + "sp": "0", + "x": "TRADE", + "X": "FILLED", + "i": 38541728873, + "l": "0.001", + "z": "0.001", + "L": "47202.40", + "n": "0.01888095", + "N": "USDT", + "T": 1639933384755, + "t": 1741505949, + "b": "0", + "a": "0", + "m": false, + "R": false, + "wt": "CONTRACT_PRICE", + "ot": "MARKET", + "ps": "BOTH", + "cp": false, + "rp": "0", + "pP": false, + "si": 0, + "ss": 0 + } + }` + + payload = jsCommentTrimmer.ReplaceAllLiteralString(payload, "") + + event, err := parseWebSocketEvent([]byte(payload)) + assert.NoError(t, err) + assert.NotNil(t, event) + + orderTradeEvent, ok := event.(*OrderTradeUpdateEvent) + assert.True(t, ok) + assert.NotNil(t, orderTradeEvent) + + assert.Equal(t, orderTradeEvent.OrderTrade.Symbol, "BTCUSDT") + assert.Equal(t, orderTradeEvent.OrderTrade.Side, "SELL") + assert.Equal(t, orderTradeEvent.OrderTrade.ClientOrderID, "x-NSUYEBKMe60cf610-f5c7-49a4-9c1") + assert.Equal(t, orderTradeEvent.OrderTrade.OrderType, "MARKET") + assert.Equal(t, orderTradeEvent.Time, int64(1639933384763)) + assert.Equal(t, orderTradeEvent.OrderTrade.OrderTradeTime, int64(1639933384755)) + assert.Equal(t, orderTradeEvent.OrderTrade.OriginalQuantity, fixedpoint.MustNewFromString("0.001")) + assert.Equal(t, orderTradeEvent.OrderTrade.OrderLastFilledQuantity, fixedpoint.MustNewFromString("0.001")) + assert.Equal(t, orderTradeEvent.OrderTrade.OrderFilledAccumulatedQuantity, fixedpoint.MustNewFromString("0.001")) + assert.Equal(t, orderTradeEvent.OrderTrade.CurrentExecutionType, "TRADE") + assert.Equal(t, orderTradeEvent.OrderTrade.CurrentOrderStatus, "FILLED") + assert.Equal(t, orderTradeEvent.OrderTrade.LastFilledPrice, fixedpoint.MustNewFromString("47202.40")) + assert.Equal(t, orderTradeEvent.OrderTrade.OrderId, int64(38541728873)) + assert.Equal(t, orderTradeEvent.OrderTrade.TradeId, int64(1741505949)) + + orderUpdate, err := orderTradeEvent.OrderFutures() + assert.NoError(t, err) + assert.NotNil(t, orderUpdate) +} diff --git a/pkg/exchange/binance/reward.go b/pkg/exchange/binance/reward.go new file mode 100644 index 0000000000..8bf3dfdcf2 --- /dev/null +++ b/pkg/exchange/binance/reward.go @@ -0,0 +1,45 @@ +package binance + +import ( + "context" + "strconv" + "time" + + "github.com/c9s/bbgo/pkg/exchange/binance/binanceapi" + 
"github.com/c9s/bbgo/pkg/types" +) + +func (e *Exchange) QueryRewards(ctx context.Context, startTime time.Time) ([]types.Reward, error) { + req := e.client2.NewGetSpotRebateHistoryRequest() + req.StartTime(startTime) + history, err := req.Do(ctx) + if err != nil { + return nil, err + } + + var rewards []types.Reward + + for _, entry := range history { + t := types.RewardCommission + switch entry.Type { + case binanceapi.RebateTypeReferralKickback: + t = types.RewardReferralKickback + case binanceapi.RebateTypeCommission: + // use the default type + } + + rewards = append(rewards, types.Reward{ + UUID: strconv.FormatInt(entry.UpdateTime.Time().UnixMilli(), 10), + Exchange: types.ExchangeBinance, + Type: t, + Currency: entry.Asset, + Quantity: entry.Amount, + State: "done", + Note: "", + Spent: false, + CreatedAt: types.Time(entry.UpdateTime), + }) + } + + return rewards, nil +} diff --git a/pkg/exchange/binance/stream.go b/pkg/exchange/binance/stream.go index 4b80f58c23..54e3754a21 100644 --- a/pkg/exchange/binance/stream.go +++ b/pkg/exchange/binance/stream.go @@ -2,41 +2,30 @@ package binance import ( "context" - "fmt" - "math/rand" - "os" - "strconv" - "strings" + "net" "time" - "github.com/adshao/go-binance/v2" - "github.com/gorilla/websocket" + "github.com/c9s/bbgo/pkg/depth" + "github.com/c9s/bbgo/pkg/util" - "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/adshao/go-binance/v2" + "github.com/adshao/go-binance/v2/futures" "github.com/c9s/bbgo/pkg/types" ) -var debugBinanceDepth bool - -func init() { - // randomize pulling - rand.Seed(time.Now().UnixNano()) +// from Binance document: +// The websocket server will send a ping frame every 3 minutes. +// If the websocket server does not receive a pong frame back from the connection within a 10 minute period, the connection will be disconnected. +// Unsolicited pong frames are allowed. - if s := os.Getenv("BINANCE_DEBUG_DEPTH"); len(s) > 0 { - v, err := strconv.ParseBool(s) - if err != nil { - log.Error(err) - } else { - debugBinanceDepth = v - if debugBinanceDepth { - log.Info("binance depth debugging is enabled") - } - } - } -} +// WebSocket connections have a limit of 5 incoming messages per second. A message is considered: +// A PING frame +// A PONG frame +// A JSON controlled message (e.g. 
subscribe, unsubscribe) +const listenKeyKeepAliveInterval = 15 * time.Minute -type StreamRequest struct { +type WebSocketCommand struct { // request ID is required ID int `json:"id"` Method string `json:"method"` @@ -46,417 +35,438 @@ type StreamRequest struct { //go:generate callbackgen -type Stream -interface type Stream struct { types.MarginSettings - + types.FuturesSettings types.StandardStream - Client *binance.Client - ListenKey string - Conn *websocket.Conn - - publicOnly bool + client *binance.Client + futuresClient *futures.Client // custom callbacks depthEventCallbacks []func(e *DepthEvent) kLineEventCallbacks []func(e *KLineEvent) kLineClosedEventCallbacks []func(e *KLineEvent) + markPriceUpdateEventCallbacks []func(e *MarkPriceUpdateEvent) + marketTradeEventCallbacks []func(e *MarketTradeEvent) + + continuousKLineEventCallbacks []func(e *ContinuousKLineEvent) + continuousKLineClosedEventCallbacks []func(e *ContinuousKLineEvent) + balanceUpdateEventCallbacks []func(event *BalanceUpdateEvent) outboundAccountInfoEventCallbacks []func(event *OutboundAccountInfoEvent) outboundAccountPositionEventCallbacks []func(event *OutboundAccountPositionEvent) executionReportEventCallbacks []func(event *ExecutionReportEvent) + bookTickerEventCallbacks []func(event *BookTickerEvent) + + orderTradeUpdateEventCallbacks []func(e *OrderTradeUpdateEvent) + accountUpdateEventCallbacks []func(e *AccountUpdateEvent) + accountConfigUpdateEventCallbacks []func(e *AccountConfigUpdateEvent) - depthFrames map[string]*DepthFrame + depthBuffers map[string]*depth.Buffer } -func NewStream(client *binance.Client) *Stream { +func NewStream(ex *Exchange, client *binance.Client, futuresClient *futures.Client) *Stream { stream := &Stream{ - Client: client, - depthFrames: make(map[string]*DepthFrame), + StandardStream: types.NewStandardStream(), + client: client, + futuresClient: futuresClient, + depthBuffers: make(map[string]*depth.Buffer), } + stream.SetParser(parseWebSocketEvent) + stream.SetDispatcher(stream.dispatchEvent) + stream.SetEndpointCreator(stream.createEndpoint) + stream.OnDepthEvent(func(e *DepthEvent) { - f, ok := stream.depthFrames[e.Symbol] - if !ok { - f = &DepthFrame{ - client: client, - context: context.Background(), - Symbol: e.Symbol, + f, ok := stream.depthBuffers[e.Symbol] + if ok { + err := f.AddUpdate(types.SliceOrderBook{ + Symbol: e.Symbol, + Bids: e.Bids, + Asks: e.Asks, + }, e.FirstUpdateID, e.FinalUpdateID) + if err != nil { + log.WithError(err).Errorf("found missing %s update event", e.Symbol) } - - stream.depthFrames[e.Symbol] = f - - f.OnReady(func(e DepthEvent, bufEvents []DepthEvent) { - snapshot, err := e.OrderBook() - if err != nil { - log.WithError(err).Error("book convert error") + } else { + f = depth.NewBuffer(func() (types.SliceOrderBook, int64, error) { + log.Infof("fetching %s depth...", e.Symbol) + return ex.QueryDepth(context.Background(), e.Symbol) + }) + f.SetBufferingPeriod(time.Second) + f.OnReady(func(snapshot types.SliceOrderBook, updates []depth.Update) { + if valid, err := snapshot.IsValid(); !valid { + log.Errorf("%s depth snapshot is invalid, error: %v", e.Symbol, err) return } - if !snapshot.IsValid() { - log.Warnf("depth snapshot is invalid, event: %+v", e) - } - stream.EmitBookSnapshot(snapshot) - - for _, e := range bufEvents { - book, err := e.OrderBook() - if err != nil { - log.WithError(err).Error("book convert error") - return - } - - stream.EmitBookUpdate(book) + for _, u := range updates { + stream.EmitBookUpdate(u.Object) } }) - - f.OnPush(func(e 
DepthEvent) { - book, err := e.OrderBook() - if err != nil { - log.WithError(err).Error("book convert error") - return - } - - stream.EmitBookUpdate(book) + f.OnPush(func(update depth.Update) { + stream.EmitBookUpdate(update.Object) }) - } else { - f.PushEvent(*e) + stream.depthBuffers[e.Symbol] = f } }) - stream.OnOutboundAccountPositionEvent(func(e *OutboundAccountPositionEvent) { - snapshot := types.BalanceMap{} - for _, balance := range e.Balances { - available := fixedpoint.Must(fixedpoint.NewFromString(balance.Free)) - locked := fixedpoint.Must(fixedpoint.NewFromString(balance.Locked)) - snapshot[balance.Asset] = types.Balance{ - Currency: balance.Asset, - Available: available, - Locked: locked, - } - } - stream.EmitBalanceSnapshot(snapshot) - }) - - stream.OnKLineEvent(func(e *KLineEvent) { - kline := e.KLine.KLine() - if e.KLine.Closed { - stream.EmitKLineClosedEvent(e) - stream.EmitKLineClosed(kline) - } else { - stream.EmitKLine(kline) - } - }) + stream.OnOutboundAccountPositionEvent(stream.handleOutboundAccountPositionEvent) + stream.OnKLineEvent(stream.handleKLineEvent) + stream.OnBookTickerEvent(stream.handleBookTickerEvent) + stream.OnExecutionReportEvent(stream.handleExecutionReportEvent) + stream.OnContinuousKLineEvent(stream.handleContinuousKLineEvent) + stream.OnMarketTradeEvent(stream.handleMarketTradeEvent) + + // Event type ACCOUNT_UPDATE from user data stream updates Balance and FuturesPosition. + stream.OnAccountUpdateEvent(stream.handleAccountUpdateEvent) + stream.OnAccountConfigUpdateEvent(stream.handleAccountConfigUpdateEvent) + stream.OnOrderTradeUpdateEvent(stream.handleOrderTradeUpdateEvent) + stream.OnDisconnect(stream.handleDisconnect) + stream.OnConnect(stream.handleConnect) + return stream +} - stream.OnExecutionReportEvent(func(e *ExecutionReportEvent) { - switch e.CurrentExecutionType { +func (s *Stream) handleDisconnect() { + log.Debugf("resetting depth snapshots...") + for _, f := range s.depthBuffers { + f.Reset() + } +} - case "NEW", "CANCELED", "REJECTED", "EXPIRED", "REPLACED": - order, err := e.Order() - if err != nil { - log.WithError(err).Error("order convert error") - return - } +func (s *Stream) handleConnect() { + if !s.PublicOnly { + return + } - stream.EmitOrderUpdate(*order) + var params []string + for _, subscription := range s.Subscriptions { + params = append(params, convertSubscription(subscription)) + } - case "TRADE": - trade, err := e.Trade() - if err != nil { - log.WithError(err).Error("trade convert error") - return - } + if len(params) == 0 { + return + } - stream.EmitTradeUpdate(*trade) - } + log.Infof("subscribing channels: %+v", params) + err := s.Conn.WriteJSON(WebSocketCommand{ + Method: "SUBSCRIBE", + Params: params, + ID: 1, }) - stream.OnConnect(func() { - // reset the previous frames - for _, f := range stream.depthFrames { - f.Reset() - } + if err != nil { + log.WithError(err).Error("subscribe error") + } +} + +func (s *Stream) handleContinuousKLineEvent(e *ContinuousKLineEvent) { + kline := e.KLine.KLine() + if e.KLine.Closed { + s.EmitContinuousKLineClosedEvent(e) + s.EmitKLineClosed(kline) + } else { + s.EmitKLine(kline) + } +} + +func (s *Stream) handleExecutionReportEvent(e *ExecutionReportEvent) { + switch e.CurrentExecutionType { - var params []string - for _, subscription := range stream.Subscriptions { - params = append(params, convertSubscription(subscription)) + case "NEW", "CANCELED", "REJECTED", "EXPIRED", "REPLACED": + order, err := e.Order() + if err != nil { + log.WithError(err).Error("order convert 
error") + return } - if len(params) == 0 { + s.EmitOrderUpdate(*order) + + case "TRADE": + trade, err := e.Trade() + if err != nil { + log.WithError(err).Error("trade convert error") return } - log.Infof("subscribing channels: %+v", params) - err := stream.Conn.WriteJSON(StreamRequest{ - Method: "SUBSCRIBE", - Params: params, - ID: 1, - }) + s.EmitTradeUpdate(*trade) + order, err := e.Order() if err != nil { - log.WithError(err).Error("subscribe error") + log.WithError(err).Error("order convert error") + return } - }) - return stream + // Update Order with FILLED event + if order.Status == types.OrderStatusFilled { + s.EmitOrderUpdate(*order) + } + } } -func (s *Stream) SetPublicOnly() { - s.publicOnly = true +func (s *Stream) handleBookTickerEvent(e *BookTickerEvent) { + s.EmitBookTickerUpdate(e.BookTicker()) } -func (s *Stream) dial(listenKey string) (*websocket.Conn, error) { - var url string - if s.publicOnly { - url = "wss://stream.binance.com:9443/ws" - } else { - url = "wss://stream.binance.com:9443/ws/" + listenKey - } +func (s *Stream) handleMarketTradeEvent(e *MarketTradeEvent) { + s.EmitMarketTrade(e.Trade()) +} - conn, _, err := websocket.DefaultDialer.Dial(url, nil) - if err != nil { - return nil, err +func (s *Stream) handleKLineEvent(e *KLineEvent) { + kline := e.KLine.KLine() + if e.KLine.Closed { + s.EmitKLineClosedEvent(e) + s.EmitKLineClosed(kline) + } else { + s.EmitKLine(kline) } - - return conn, nil } -func (s *Stream) fetchListenKey(ctx context.Context) (string, error) { - if s.IsMargin { - if s.IsIsolatedMargin { - log.Infof("isolated margin %s is enabled, requesting margin user stream listen key...", s.IsolatedMarginSymbol) - req := s.Client.NewStartIsolatedMarginUserStreamService() - req.Symbol(s.IsolatedMarginSymbol) - return req.Do(ctx) +func (s *Stream) handleOutboundAccountPositionEvent(e *OutboundAccountPositionEvent) { + snapshot := types.BalanceMap{} + for _, balance := range e.Balances { + snapshot[balance.Asset] = types.Balance{ + Currency: balance.Asset, + Available: balance.Free, + Locked: balance.Locked, } - - log.Infof("margin mode is enabled, requesting margin user stream listen key...") - req := s.Client.NewStartMarginUserStreamService() - return req.Do(ctx) } - - return s.Client.NewStartUserStreamService().Do(ctx) + s.EmitBalanceSnapshot(snapshot) } -func (s *Stream) keepaliveListenKey(ctx context.Context, listenKey string) error { - if s.IsMargin { - if s.IsIsolatedMargin { - req := s.Client.NewKeepaliveIsolatedMarginUserStreamService().ListenKey(listenKey) - req.Symbol(s.IsolatedMarginSymbol) - return req.Do(ctx) - } +func (s *Stream) handleAccountUpdateEvent(e *AccountUpdateEvent) { + futuresPositionSnapshot := toGlobalFuturesPositions(e.AccountUpdate.Positions) + s.EmitFuturesPositionSnapshot(futuresPositionSnapshot) - req := s.Client.NewKeepaliveMarginUserStreamService().ListenKey(listenKey) - return req.Do(ctx) - } + balanceSnapshot := toGlobalFuturesBalance(e.AccountUpdate.Balances) + s.EmitBalanceSnapshot(balanceSnapshot) +} - return s.Client.NewKeepaliveUserStreamService().ListenKey(listenKey).Do(ctx) +// TODO: emit account config leverage updates +func (s *Stream) handleAccountConfigUpdateEvent(e *AccountConfigUpdateEvent) { } -func (s *Stream) connect(ctx context.Context) error { - if s.publicOnly { - log.Infof("stream is set to public only mode") - } else { - log.Infof("request listen key for creating user data stream...") +func (s *Stream) handleOrderTradeUpdateEvent(e *OrderTradeUpdateEvent) { + switch 
e.OrderTrade.CurrentExecutionType { - listenKey, err := s.fetchListenKey(ctx) + case "NEW", "CANCELED", "EXPIRED": + order, err := e.OrderFutures() if err != nil { - return err + log.WithError(err).Error("futures order convert error") + return } - s.ListenKey = listenKey - log.Infof("user data stream created. listenKey: %s", maskListenKey(s.ListenKey)) - } + s.EmitOrderUpdate(*order) - conn, err := s.dial(s.ListenKey) - if err != nil { - return err - } + case "TRADE": + trade, err := e.TradeFutures() + if err != nil { + log.WithError(err).Error("futures trade convert error") + return + } + + s.EmitTradeUpdate(*trade) - log.Infof("websocket connected") - s.Conn = conn + case "CALCULATED - Liquidation Execution": + log.Infof("CALCULATED - Liquidation Execution not support yet.") + } - s.EmitConnect() - return nil } -func convertSubscription(s types.Subscription) string { - // binance uses lower case symbol name, - // for kline, it's "@kline_" - // for depth, it's "@depth OR @depth@100ms" - switch s.Channel { - case types.KLineChannel: - return fmt.Sprintf("%s@%s_%s", strings.ToLower(s.Symbol), s.Channel, s.Options.String()) +func (s *Stream) getEndpointUrl(listenKey string) string { + var url string - case types.BookChannel: - return fmt.Sprintf("%s@depth", strings.ToLower(s.Symbol)) + if s.IsFutures { + url = FuturesWebSocketURL + "/ws" + } else if isBinanceUs() { + url = BinanceUSWebSocketURL + "/ws" + } else { + url = WebSocketURL + "/ws" } - return fmt.Sprintf("%s@%s", strings.ToLower(s.Symbol), s.Channel) -} - -func (s *Stream) Connect(ctx context.Context) error { - err := s.connect(ctx) - if err != nil { - return err + if !s.PublicOnly { + url += "/" + listenKey } - go s.read(ctx) - return nil + return url } -func (s *Stream) read(ctx context.Context) { - - pingTicker := time.NewTicker(20 * time.Second) - defer pingTicker.Stop() - - keepAliveTicker := time.NewTicker(5 * time.Minute) - defer keepAliveTicker.Stop() - - go func() { - for { - select { - - case <-ctx.Done(): - return +func (s *Stream) createEndpoint(ctx context.Context) (string, error) { + var err error + var listenKey string + if s.PublicOnly { + log.Debugf("stream is set to public only mode") + } else { + listenKey, err = s.fetchListenKey(ctx) + if err != nil { + return "", err + } - case <-pingTicker.C: - if err := s.Conn.WriteControl(websocket.PingMessage, []byte("hb"), time.Now().Add(1*time.Second)); err != nil { - log.WithError(err).Error("ping error", err) - } + log.Debugf("listen key is created: %s", util.MaskKey(listenKey)) + go s.listenKeyKeepAlive(ctx, listenKey) + } - case <-keepAliveTicker.C: - if !s.publicOnly { - if err := s.keepaliveListenKey(ctx, s.ListenKey); err != nil { - log.WithError(err).Errorf("listen key keep-alive error: %v key: %s", err, maskListenKey(s.ListenKey)) - } - } + url := s.getEndpointUrl(listenKey) + return url, nil +} - } - } - }() +func (s *Stream) dispatchEvent(e interface{}) { + switch e := e.(type) { - for { - select { + case *OutboundAccountPositionEvent: + s.EmitOutboundAccountPositionEvent(e) - case <-ctx.Done(): - return + case *OutboundAccountInfoEvent: + s.EmitOutboundAccountInfoEvent(e) - default: - if err := s.Conn.SetReadDeadline(time.Now().Add(1 * time.Minute)); err != nil { - log.WithError(err).Errorf("set read deadline error: %s", err.Error()) - } + case *BalanceUpdateEvent: + s.EmitBalanceUpdateEvent(e) - mt, message, err := s.Conn.ReadMessage() - if err != nil { - if websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway) { - log.WithError(err).Errorf("read 
error: %s", err.Error()) - } + case *MarketTradeEvent: + s.EmitMarketTradeEvent(e) - // reconnect - for err != nil { - select { - case <-ctx.Done(): - return + case *KLineEvent: + s.EmitKLineEvent(e) - default: - if !s.publicOnly { - if err := s.invalidateListenKey(ctx, s.ListenKey); err != nil { - log.WithError(err).Error("invalidate listen key error") - } - } - - err = s.connect(ctx) - time.Sleep(5 * time.Second) - } - } + case *BookTickerEvent: + s.EmitBookTickerEvent(e) - continue - } + case *DepthEvent: + s.EmitDepthEvent(e) - // skip non-text messages - if mt != websocket.TextMessage { - continue - } + case *ExecutionReportEvent: + s.EmitExecutionReportEvent(e) - log.Debug(string(message)) + case *MarkPriceUpdateEvent: + s.EmitMarkPriceUpdateEvent(e) - e, err := ParseEvent(string(message)) - if err != nil { - log.WithError(err).Errorf("[binance] event parse error") - continue - } + case *ContinuousKLineEvent: + s.EmitContinuousKLineEvent(e) - // log.NotifyTo("[binance] event: %+v", e) - switch e := e.(type) { + case *OrderTradeUpdateEvent: + s.EmitOrderTradeUpdateEvent(e) - case *OutboundAccountPositionEvent: - log.Info(e.Event, " ", e.Balances) - s.EmitOutboundAccountPositionEvent(e) + case *AccountUpdateEvent: + s.EmitAccountUpdateEvent(e) - case *OutboundAccountInfoEvent: - log.Info(e.Event, " ", e.Balances) - s.EmitOutboundAccountInfoEvent(e) + case *AccountConfigUpdateEvent: + s.EmitAccountConfigUpdateEvent(e) + } +} - case *BalanceUpdateEvent: - log.Info(e.Event, " ", e.Asset, " ", e.Delta) - s.EmitBalanceUpdateEvent(e) +func (s *Stream) fetchListenKey(ctx context.Context) (string, error) { + if s.IsMargin { + if s.IsIsolatedMargin { + log.Debugf("isolated margin %s is enabled, requesting margin user stream listen key...", s.IsolatedMarginSymbol) + req := s.client.NewStartIsolatedMarginUserStreamService() + req.Symbol(s.IsolatedMarginSymbol) + return req.Do(ctx) + } - case *KLineEvent: - s.EmitKLineEvent(e) + log.Debugf("margin mode is enabled, requesting margin user stream listen key...") + req := s.client.NewStartMarginUserStreamService() + return req.Do(ctx) + } else if s.IsFutures { + log.Debugf("futures mode is enabled, requesting futures user stream listen key...") + req := s.futuresClient.NewStartUserStreamService() + return req.Do(ctx) + } - case *DepthEvent: - s.EmitDepthEvent(e) + log.Debugf("spot mode is enabled, requesting user stream listen key...") + return s.client.NewStartUserStreamService().Do(ctx) +} - case *ExecutionReportEvent: - log.Info(e.Event, " ", e) - s.EmitExecutionReportEvent(e) - } +func (s *Stream) keepaliveListenKey(ctx context.Context, listenKey string) error { + log.Debugf("keepalive listen key: %s", util.MaskKey(listenKey)) + if s.IsMargin { + if s.IsIsolatedMargin { + req := s.client.NewKeepaliveIsolatedMarginUserStreamService().ListenKey(listenKey) + req.Symbol(s.IsolatedMarginSymbol) + return req.Do(ctx) } + req := s.client.NewKeepaliveMarginUserStreamService().ListenKey(listenKey) + return req.Do(ctx) + } else if s.IsFutures { + req := s.futuresClient.NewKeepaliveUserStreamService().ListenKey(listenKey) + return req.Do(ctx) } + + return s.client.NewKeepaliveUserStreamService().ListenKey(listenKey).Do(ctx) } -func (s *Stream) invalidateListenKey(ctx context.Context, listenKey string) (err error) { +func (s *Stream) closeListenKey(ctx context.Context, listenKey string) (err error) { // should use background context to invalidate the user stream - log.Info("closing listen key") + log.Debugf("closing listen key: %s", util.MaskKey(listenKey)) if 
s.IsMargin { if s.IsIsolatedMargin { - req := s.Client.NewCloseIsolatedMarginUserStreamService().ListenKey(listenKey) + req := s.client.NewCloseIsolatedMarginUserStreamService().ListenKey(listenKey) req.Symbol(s.IsolatedMarginSymbol) err = req.Do(ctx) } else { - req := s.Client.NewCloseMarginUserStreamService().ListenKey(listenKey) + req := s.client.NewCloseMarginUserStreamService().ListenKey(listenKey) err = req.Do(ctx) } + } else if s.IsFutures { + req := s.futuresClient.NewCloseUserStreamService().ListenKey(listenKey) + err = req.Do(ctx) } else { - err = s.Client.NewCloseUserStreamService().ListenKey(listenKey).Do(ctx) - } - - if err != nil { - log.WithError(err).Error("error deleting listen key") - return err + err = s.client.NewCloseUserStreamService().ListenKey(listenKey).Do(ctx) } - return nil + return err } -func (s *Stream) Close() error { - log.Infof("closing user data stream...") +// listenKeyKeepAlive +// From Binance +// Keepalive a user data stream to prevent a time out. User data streams will close after 60 minutes. +// It's recommended to send a ping about every 30 minutes. +func (s *Stream) listenKeyKeepAlive(ctx context.Context, listenKey string) { + keepAliveTicker := time.NewTicker(listenKeyKeepAliveInterval) + defer keepAliveTicker.Stop() - if !s.publicOnly { - if err := s.invalidateListenKey(context.Background(), s.ListenKey); err != nil { - log.WithError(err).Error("invalidate listen key error") + // if we exit, we should invalidate the existing listen key + defer func() { + log.Debugf("keepalive worker stopped") + if err := s.closeListenKey(context.Background(), listenKey); err != nil { + log.WithError(err).Errorf("close listen key error: %v key: %s", err, util.MaskKey(listenKey)) } - log.Infof("user data stream closed") - } + }() - return s.Conn.Close() -} + log.Debugf("starting listen key keep alive worker with interval %s, listen key = %s", listenKeyKeepAliveInterval, util.MaskKey(listenKey)) -func maskListenKey(listenKey string) string { - maskKey := listenKey[0:5] - return maskKey + strings.Repeat("*", len(listenKey)-1-5) -} + for { + select { -//go:generate callbackgen -type DepthFrame + case <-s.CloseC: + return + + case <-ctx.Done(): + return + + case <-keepAliveTicker.C: + for i := 0; i < 5; i++ { + err := s.keepaliveListenKey(ctx, listenKey) + if err != nil { + time.Sleep(5 * time.Second) + switch err.(type) { + case net.Error: + log.WithError(err).Errorf("listen key keep-alive network error: %v key: %s", err, util.MaskKey(listenKey)) + continue + + default: + log.WithError(err).Errorf("listen key keep-alive unexpected error: %v key: %s", err, util.MaskKey(listenKey)) + s.Reconnect() + return + + } + } else { + break + } + } + + } + } +} diff --git a/pkg/exchange/binance/stream_callbacks.go b/pkg/exchange/binance/stream_callbacks.go index 79bf6bd7fc..9f9d3cea9f 100644 --- a/pkg/exchange/binance/stream_callbacks.go +++ b/pkg/exchange/binance/stream_callbacks.go @@ -34,6 +34,46 @@ func (s *Stream) EmitKLineClosedEvent(e *KLineEvent) { } } +func (s *Stream) OnMarkPriceUpdateEvent(cb func(e *MarkPriceUpdateEvent)) { + s.markPriceUpdateEventCallbacks = append(s.markPriceUpdateEventCallbacks, cb) +} + +func (s *Stream) EmitMarkPriceUpdateEvent(e *MarkPriceUpdateEvent) { + for _, cb := range s.markPriceUpdateEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnMarketTradeEvent(cb func(e *MarketTradeEvent)) { + s.marketTradeEventCallbacks = append(s.marketTradeEventCallbacks, cb) +} + +func (s *Stream) EmitMarketTradeEvent(e *MarketTradeEvent) { + for _, cb := 
range s.marketTradeEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnContinuousKLineEvent(cb func(e *ContinuousKLineEvent)) { + s.continuousKLineEventCallbacks = append(s.continuousKLineEventCallbacks, cb) +} + +func (s *Stream) EmitContinuousKLineEvent(e *ContinuousKLineEvent) { + for _, cb := range s.continuousKLineEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnContinuousKLineClosedEvent(cb func(e *ContinuousKLineEvent)) { + s.continuousKLineClosedEventCallbacks = append(s.continuousKLineClosedEventCallbacks, cb) +} + +func (s *Stream) EmitContinuousKLineClosedEvent(e *ContinuousKLineEvent) { + for _, cb := range s.continuousKLineClosedEventCallbacks { + cb(e) + } +} + func (s *Stream) OnBalanceUpdateEvent(cb func(event *BalanceUpdateEvent)) { s.balanceUpdateEventCallbacks = append(s.balanceUpdateEventCallbacks, cb) } @@ -74,6 +114,46 @@ func (s *Stream) EmitExecutionReportEvent(event *ExecutionReportEvent) { } } +func (s *Stream) OnBookTickerEvent(cb func(event *BookTickerEvent)) { + s.bookTickerEventCallbacks = append(s.bookTickerEventCallbacks, cb) +} + +func (s *Stream) EmitBookTickerEvent(event *BookTickerEvent) { + for _, cb := range s.bookTickerEventCallbacks { + cb(event) + } +} + +func (s *Stream) OnOrderTradeUpdateEvent(cb func(e *OrderTradeUpdateEvent)) { + s.orderTradeUpdateEventCallbacks = append(s.orderTradeUpdateEventCallbacks, cb) +} + +func (s *Stream) EmitOrderTradeUpdateEvent(e *OrderTradeUpdateEvent) { + for _, cb := range s.orderTradeUpdateEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnAccountUpdateEvent(cb func(e *AccountUpdateEvent)) { + s.accountUpdateEventCallbacks = append(s.accountUpdateEventCallbacks, cb) +} + +func (s *Stream) EmitAccountUpdateEvent(e *AccountUpdateEvent) { + for _, cb := range s.accountUpdateEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnAccountConfigUpdateEvent(cb func(e *AccountConfigUpdateEvent)) { + s.accountConfigUpdateEventCallbacks = append(s.accountConfigUpdateEventCallbacks, cb) +} + +func (s *Stream) EmitAccountConfigUpdateEvent(e *AccountConfigUpdateEvent) { + for _, cb := range s.accountConfigUpdateEventCallbacks { + cb(e) + } +} + type StreamEventHub interface { OnDepthEvent(cb func(e *DepthEvent)) @@ -81,6 +161,14 @@ type StreamEventHub interface { OnKLineClosedEvent(cb func(e *KLineEvent)) + OnMarkPriceUpdateEvent(cb func(e *MarkPriceUpdateEvent)) + + OnMarketTradeEvent(cb func(e *MarketTradeEvent)) + + OnContinuousKLineEvent(cb func(e *ContinuousKLineEvent)) + + OnContinuousKLineClosedEvent(cb func(e *ContinuousKLineEvent)) + OnBalanceUpdateEvent(cb func(event *BalanceUpdateEvent)) OnOutboundAccountInfoEvent(cb func(event *OutboundAccountInfoEvent)) @@ -88,4 +176,12 @@ type StreamEventHub interface { OnOutboundAccountPositionEvent(cb func(event *OutboundAccountPositionEvent)) OnExecutionReportEvent(cb func(event *ExecutionReportEvent)) + + OnBookTickerEvent(cb func(event *BookTickerEvent)) + + OnOrderTradeUpdateEvent(cb func(e *OrderTradeUpdateEvent)) + + OnAccountUpdateEvent(cb func(e *AccountUpdateEvent)) + + OnAccountConfigUpdateEvent(cb func(e *AccountConfigUpdateEvent)) } diff --git a/pkg/exchange/binance/ticker_test.go b/pkg/exchange/binance/ticker_test.go new file mode 100644 index 0000000000..3bdee463c1 --- /dev/null +++ b/pkg/exchange/binance/ticker_test.go @@ -0,0 +1,54 @@ +package binance + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExchange_QueryTickers_AllSymbols(t *testing.T) { + key := os.Getenv("BINANCE_API_KEY") + secret := 
os.Getenv("BINANCE_API_SECRET") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + return + } + + e := New(key, secret) + got, err := e.QueryTickers(context.Background()) + if assert.NoError(t, err) { + assert.True(t, len(got) > 1, "binance: attempting to get all symbol tickers, but get 1 or less") + } +} + +func TestExchange_QueryTickers_SomeSymbols(t *testing.T) { + key := os.Getenv("BINANCE_API_KEY") + secret := os.Getenv("BINANCE_API_SECRET") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + return + } + + e := New(key, secret) + got, err := e.QueryTickers(context.Background(), "BTCUSDT", "ETHUSDT") + if assert.NoError(t, err) { + assert.Len(t, got, 2, "binance: attempting to get two symbols, but number of tickers do not match") + } +} + +func TestExchange_QueryTickers_SingleSymbol(t *testing.T) { + key := os.Getenv("BINANCE_API_KEY") + secret := os.Getenv("BINANCE_API_SECRET") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + return + } + + e := New(key, secret) + got, err := e.QueryTickers(context.Background(), "BTCUSDT") + if assert.NoError(t, err) { + assert.Len(t, got, 1, "binance: attempting to get one symbol, but number of tickers do not match") + } +} diff --git a/pkg/exchange/binance/trade.go b/pkg/exchange/binance/trade.go deleted file mode 100644 index ee8f0e66e0..0000000000 --- a/pkg/exchange/binance/trade.go +++ /dev/null @@ -1,22 +0,0 @@ -package binance - -import "github.com/adshao/go-binance/v2" - -func BuyerOrSellerLabel(trade *binance.TradeV3) (o string) { - if trade.IsBuyer { - o = "BUYER" - } else { - o = "SELLER" - } - return o -} - -func MakerOrTakerLabel(trade *binance.TradeV3) (o string) { - if trade.IsMaker { - o += "MAKER" - } else { - o += "TAKER" - } - - return o -} diff --git a/pkg/exchange/factory.go b/pkg/exchange/factory.go new file mode 100644 index 0000000000..d03f8654e9 --- /dev/null +++ b/pkg/exchange/factory.go @@ -0,0 +1,65 @@ +package exchange + +import ( + "fmt" + "os" + "strings" + + "github.com/c9s/bbgo/pkg/exchange/binance" + "github.com/c9s/bbgo/pkg/exchange/ftx" + "github.com/c9s/bbgo/pkg/exchange/kucoin" + "github.com/c9s/bbgo/pkg/exchange/max" + "github.com/c9s/bbgo/pkg/exchange/okex" + "github.com/c9s/bbgo/pkg/types" +) + +func NewPublic(exchangeName types.ExchangeName) (types.Exchange, error) { + return NewStandard(exchangeName, "", "", "", "") +} + +func NewStandard(n types.ExchangeName, key, secret, passphrase, subAccount string) (types.Exchange, error) { + switch n { + + case types.ExchangeFTX: + return ftx.NewExchange(key, secret, subAccount), nil + + case types.ExchangeBinance: + return binance.New(key, secret), nil + + case types.ExchangeMax: + return max.New(key, secret), nil + + case types.ExchangeOKEx: + return okex.New(key, secret, passphrase), nil + + case types.ExchangeKucoin: + return kucoin.New(key, secret, passphrase), nil + + default: + return nil, fmt.Errorf("unsupported exchange: %v", n) + + } +} + +func NewWithEnvVarPrefix(n types.ExchangeName, varPrefix string) (types.Exchange, error) { + if len(varPrefix) == 0 { + varPrefix = n.String() + } + + varPrefix = strings.ToUpper(varPrefix) + + key := os.Getenv(varPrefix + "_API_KEY") + secret := os.Getenv(varPrefix + "_API_SECRET") + if len(key) == 0 || len(secret) == 0 { + return nil, fmt.Errorf("can not initialize exchange %s: empty key or secret, env var prefix: %s", n, varPrefix) + } + + passphrase := os.Getenv(varPrefix + "_API_PASSPHRASE") + subAccount 
:= os.Getenv(varPrefix + "_SUBACCOUNT") + return NewStandard(n, key, secret, passphrase, subAccount) +} + +// New constructor exchange object from viper config. +func New(n types.ExchangeName) (types.Exchange, error) { + return NewWithEnvVarPrefix(n, "") +} diff --git a/pkg/exchange/ftx/convert.go b/pkg/exchange/ftx/convert.go new file mode 100644 index 0000000000..f40bc73bb3 --- /dev/null +++ b/pkg/exchange/ftx/convert.go @@ -0,0 +1,249 @@ +package ftx + +import ( + "fmt" + "strings" + "time" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/exchange/ftx/ftxapi" + "github.com/c9s/bbgo/pkg/types" +) + +func toGlobalCurrency(original string) string { + return TrimUpperString(original) +} + +func toGlobalSymbol(original string) string { + return strings.ReplaceAll(TrimUpperString(original), "/", "") +} + +func toLocalSymbol(original string) string { + if symbolMap[original] == "" { + return original + } + + return symbolMap[original] +} + +func TrimUpperString(original string) string { + return strings.ToUpper(strings.TrimSpace(original)) +} + +func TrimLowerString(original string) string { + return strings.ToLower(strings.TrimSpace(original)) +} + +var errUnsupportedOrderStatus = fmt.Errorf("unsupported order status") + +func toGlobalOrderNew(r ftxapi.Order) (types.Order, error) { + // In exchange/max/convert.go, it only parses these fields. + timeInForce := types.TimeInForceGTC + if r.Ioc { + timeInForce = types.TimeInForceIOC + } + + // order type definition: https://github.com/ftexchange/ftx/blob/master/rest/client.py#L122 + orderType := types.OrderType(TrimUpperString(string(r.Type))) + if orderType == types.OrderTypeLimit && r.PostOnly { + orderType = types.OrderTypeLimitMaker + } + + o := types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: r.ClientId, + Symbol: toGlobalSymbol(r.Market), + Side: types.SideType(TrimUpperString(string(r.Side))), + Type: orderType, + Quantity: r.Size, + Price: r.Price, + TimeInForce: timeInForce, + }, + Exchange: types.ExchangeFTX, + IsWorking: r.Status == ftxapi.OrderStatusOpen || r.Status == ftxapi.OrderStatusNew, + OrderID: uint64(r.Id), + Status: "", + ExecutedQuantity: r.FilledSize, + CreationTime: types.Time(r.CreatedAt), + UpdateTime: types.Time(r.CreatedAt), + } + + s, err := toGlobalOrderStatus(r, r.Status) + o.Status = s + return o, err +} + +func toGlobalOrderStatus(o ftxapi.Order, s ftxapi.OrderStatus) (types.OrderStatus, error) { + switch s { + case ftxapi.OrderStatusNew: + return types.OrderStatusNew, nil + + case ftxapi.OrderStatusOpen: + if !o.FilledSize.IsZero() { + return types.OrderStatusPartiallyFilled, nil + } else { + return types.OrderStatusNew, nil + } + case ftxapi.OrderStatusClosed: + // filled or canceled + if o.FilledSize == o.Size { + return types.OrderStatusFilled, nil + } else { + // can't distinguish it's canceled or rejected from order response, so always set to canceled + return types.OrderStatusCanceled, nil + } + } + + return "", fmt.Errorf("unsupported ftx order status %s: %w", s, errUnsupportedOrderStatus) +} + +func toGlobalOrder(r order) (types.Order, error) { + // In exchange/max/convert.go, it only parses these fields. 
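+	// Conversion note (added for clarity): FTX signals IOC with the boolean `ioc` flag, so the GTC
+	// default below is overridden when r.Ioc is true, and a post-only limit order is reported as
+	// types.OrderTypeLimitMaker. As a hypothetical example, a response with `"type": "limit",
+	// "postOnly": true, "ioc": false` maps to (OrderTypeLimitMaker, TimeInForceGTC).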
+ timeInForce := types.TimeInForceGTC + if r.Ioc { + timeInForce = types.TimeInForceIOC + } + + // order type definition: https://github.com/ftexchange/ftx/blob/master/rest/client.py#L122 + orderType := types.OrderType(TrimUpperString(r.Type)) + if orderType == types.OrderTypeLimit && r.PostOnly { + orderType = types.OrderTypeLimitMaker + } + + o := types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: r.ClientId, + Symbol: toGlobalSymbol(r.Market), + Side: types.SideType(TrimUpperString(r.Side)), + Type: orderType, + Quantity: r.Size, + Price: r.Price, + TimeInForce: timeInForce, + }, + Exchange: types.ExchangeFTX, + IsWorking: r.Status == "open", + OrderID: uint64(r.ID), + Status: "", + ExecutedQuantity: r.FilledSize, + CreationTime: types.Time(r.CreatedAt.Time), + UpdateTime: types.Time(r.CreatedAt.Time), + } + + // `new` (accepted but not processed yet), `open`, or `closed` (filled or cancelled) + switch r.Status { + case "new": + o.Status = types.OrderStatusNew + case "open": + if !o.ExecutedQuantity.IsZero() { + o.Status = types.OrderStatusPartiallyFilled + } else { + o.Status = types.OrderStatusNew + } + case "closed": + // filled or canceled + if o.Quantity == o.ExecutedQuantity { + o.Status = types.OrderStatusFilled + } else { + // can't distinguish it's canceled or rejected from order response, so always set to canceled + o.Status = types.OrderStatusCanceled + } + default: + return types.Order{}, fmt.Errorf("unsupported status %s: %w", r.Status, errUnsupportedOrderStatus) + } + + return o, nil +} + +func toGlobalDeposit(input depositHistory) (types.Deposit, error) { + s, err := toGlobalDepositStatus(input.Status) + if err != nil { + log.WithError(err).Warnf("assign empty string to the deposit status") + } + t := input.Time + if input.ConfirmedTime.Time != (time.Time{}) { + t = input.ConfirmedTime + } + d := types.Deposit{ + GID: 0, + Exchange: types.ExchangeFTX, + Time: types.Time(t.Time), + Amount: input.Size, + Asset: toGlobalCurrency(input.Coin), + TransactionID: input.TxID, + Status: s, + Address: input.Address.Address, + AddressTag: input.Address.Tag, + } + return d, nil +} + +func toGlobalDepositStatus(input string) (types.DepositStatus, error) { + // The document only list `confirmed` status + switch input { + case "confirmed", "complete": + return types.DepositSuccess, nil + } + return "", fmt.Errorf("unsupported status %s", input) +} + +func toGlobalTrade(f ftxapi.Fill) (types.Trade, error) { + return types.Trade{ + ID: f.TradeId, + OrderID: f.OrderId, + Exchange: types.ExchangeFTX, + Price: f.Price, + Quantity: f.Size, + QuoteQuantity: f.Price.Mul(f.Size), + Symbol: toGlobalSymbol(f.Market), + Side: types.SideType(strings.ToUpper(string(f.Side))), + IsBuyer: f.Side == ftxapi.SideBuy, + IsMaker: f.Liquidity == ftxapi.LiquidityMaker, + Time: types.Time(f.Time), + Fee: f.Fee, + FeeCurrency: f.FeeCurrency, + IsMargin: false, + IsIsolated: false, + IsFutures: f.Future != "", + }, nil +} + +func toGlobalKLine(symbol string, interval types.Interval, h Candle) (types.KLine, error) { + return types.KLine{ + Exchange: types.ExchangeFTX, + Symbol: toGlobalSymbol(symbol), + StartTime: types.Time(h.StartTime.Time), + EndTime: types.Time(h.StartTime.Add(interval.Duration())), + Interval: interval, + Open: h.Open, + Close: h.Close, + High: h.High, + Low: h.Low, + Volume: h.Volume, + Closed: true, + }, nil +} + +type OrderType string + +const ( + OrderTypeLimit OrderType = "limit" + OrderTypeMarket OrderType = "market" +) + +func toLocalOrderType(orderType types.OrderType) 
(ftxapi.OrderType, error) { + switch orderType { + + case types.OrderTypeLimitMaker: + return ftxapi.OrderTypeLimit, nil + + case types.OrderTypeLimit: + return ftxapi.OrderTypeLimit, nil + + case types.OrderTypeMarket: + return ftxapi.OrderTypeMarket, nil + + } + + return "", fmt.Errorf("order type %s not supported", orderType) +} diff --git a/pkg/exchange/ftx/convert_test.go b/pkg/exchange/ftx/convert_test.go new file mode 100644 index 0000000000..3a1ea7f1e7 --- /dev/null +++ b/pkg/exchange/ftx/convert_test.go @@ -0,0 +1,121 @@ +package ftx + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/exchange/ftx/ftxapi" + "github.com/c9s/bbgo/pkg/types" +) + +func Test_toGlobalOrderFromOpenOrder(t *testing.T) { + input := ` +{ + "createdAt": "2019-03-05T09:56:55.728933+00:00", + "filledSize": 10, + "future": "XRP-PERP", + "id": 9596912, + "market": "XRP-PERP", + "price": 0.306525, + "avgFillPrice": 0.306526, + "remainingSize": 31421, + "side": "sell", + "size": 31431, + "status": "open", + "type": "limit", + "reduceOnly": false, + "ioc": false, + "postOnly": false, + "clientId": "client-id-123" +} +` + + var r order + assert.NoError(t, json.Unmarshal([]byte(input), &r)) + + o, err := toGlobalOrder(r) + assert.NoError(t, err) + assert.Equal(t, "client-id-123", o.ClientOrderID) + assert.Equal(t, "XRP-PERP", o.Symbol) + assert.Equal(t, types.SideTypeSell, o.Side) + assert.Equal(t, types.OrderTypeLimit, o.Type) + assert.Equal(t, "31431", o.Quantity.String()) + assert.Equal(t, "0.306525", o.Price.String()) + assert.Equal(t, types.TimeInForceGTC, o.TimeInForce) + assert.Equal(t, types.ExchangeFTX, o.Exchange) + assert.True(t, o.IsWorking) + assert.Equal(t, uint64(9596912), o.OrderID) + assert.Equal(t, types.OrderStatusPartiallyFilled, o.Status) + assert.Equal(t, "10", o.ExecutedQuantity.String()) +} + +func TestTrimLowerString(t *testing.T) { + type args struct { + original string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "spaces", + args: args{ + original: " ", + }, + want: "", + }, + { + name: "uppercase", + args: args{ + original: " HELLO ", + }, + want: "hello", + }, + { + name: "lowercase", + args: args{ + original: " hello", + }, + want: "hello", + }, + { + name: "upper/lower cases", + args: args{ + original: " heLLo ", + }, + want: "hello", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := TrimLowerString(tt.args.original); got != tt.want { + t.Errorf("TrimLowerString() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_toGlobalSymbol(t *testing.T) { + assert.Equal(t, "BTCUSDT", toGlobalSymbol("BTC/USDT")) +} + +func Test_toLocalOrderTypeWithLimitMaker(t *testing.T) { + orderType, err := toLocalOrderType(types.OrderTypeLimitMaker) + assert.NoError(t, err) + assert.Equal(t, ftxapi.OrderTypeLimit, orderType) +} + +func Test_toLocalOrderTypeWithLimit(t *testing.T) { + orderType, err := toLocalOrderType(types.OrderTypeLimit) + assert.NoError(t, err) + assert.Equal(t, ftxapi.OrderTypeLimit, orderType) +} + +func Test_toLocalOrderTypeWithMarket(t *testing.T) { + orderType, err := toLocalOrderType(types.OrderTypeMarket) + assert.NoError(t, err) + assert.Equal(t, ftxapi.OrderTypeMarket, orderType) +} diff --git a/pkg/exchange/ftx/exchange.go b/pkg/exchange/ftx/exchange.go new file mode 100644 index 0000000000..b947dc5c08 --- /dev/null +++ b/pkg/exchange/ftx/exchange.go @@ -0,0 +1,618 @@ +package ftx + +import ( + "context" + "fmt" + "net/http" + "net/url" + 
"sort" + "strconv" + "strings" + "time" + + "golang.org/x/time/rate" + + "github.com/google/uuid" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/exchange/ftx/ftxapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +const ( + restEndpoint = "https://ftx.com" + defaultHTTPTimeout = 15 * time.Second +) + +var logger = logrus.WithField("exchange", "ftx") + +// POST https://ftx.com/api/orders 429, Success: false, err: Do not send more than 2 orders on this market per 200ms +var requestLimit = rate.NewLimiter(rate.Every(220*time.Millisecond), 2) + +var marketDataLimiter = rate.NewLimiter(rate.Every(500*time.Millisecond), 2) + +//go:generate go run generate_symbol_map.go + +type Exchange struct { + client *ftxapi.RestClient + + key, secret string + subAccount string + restEndpoint *url.URL +} + +type MarketTicker struct { + Market types.Market + Price fixedpoint.Value + Ask fixedpoint.Value + Bid fixedpoint.Value + Last fixedpoint.Value +} + +type MarketMap map[string]MarketTicker + +// FTX does not have broker ID +const spotBrokerID = "BBGO" + +func newSpotClientOrderID(originalID string) (clientOrderID string) { + prefix := "x-" + spotBrokerID + prefixLen := len(prefix) + + if originalID != "" { + // try to keep the whole original client order ID if user specifies it. + if prefixLen+len(originalID) > 32 { + return originalID + } + + clientOrderID = prefix + originalID + return clientOrderID + } + + clientOrderID = uuid.New().String() + clientOrderID = prefix + clientOrderID + if len(clientOrderID) > 32 { + return clientOrderID[0:32] + } + + return clientOrderID +} + +func NewExchange(key, secret string, subAccount string) *Exchange { + u, err := url.Parse(restEndpoint) + if err != nil { + panic(err) + } + + client := ftxapi.NewClient() + client.Auth(key, secret, subAccount) + return &Exchange{ + client: client, + restEndpoint: u, + key: key, + // pragma: allowlist nextline secret + secret: secret, + subAccount: subAccount, + } +} + +func (e *Exchange) newRest() *restRequest { + r := newRestRequest(&http.Client{Timeout: defaultHTTPTimeout}, e.restEndpoint).Auth(e.key, e.secret) + if len(e.subAccount) > 0 { + r.SubAccount(e.subAccount) + } + return r +} + +func (e *Exchange) Name() types.ExchangeName { + return types.ExchangeFTX +} + +func (e *Exchange) PlatformFeeCurrency() string { + return toGlobalCurrency("FTT") +} + +func (e *Exchange) NewStream() types.Stream { + return NewStream(e.key, e.secret, e.subAccount, e) +} + +func (e *Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { + markets, err := e._queryMarkets(ctx) + if err != nil { + return nil, err + } + marketMap := types.MarketMap{} + for k, v := range markets { + marketMap[k] = v.Market + } + return marketMap, nil +} + +func (e *Exchange) _queryMarkets(ctx context.Context) (MarketMap, error) { + req := e.client.NewGetMarketsRequest() + ftxMarkets, err := req.Do(ctx) + if err != nil { + return nil, err + } + + markets := MarketMap{} + for _, m := range ftxMarkets { + symbol := toGlobalSymbol(m.Name) + symbolMap[symbol] = m.Name + + mkt2 := MarketTicker{ + Market: types.Market{ + Symbol: symbol, + LocalSymbol: m.Name, + // The max precision is length(DefaultPow). For example, currently fixedpoint.DefaultPow + // is 1e8, so the max precision will be 8. 
+ PricePrecision: m.PriceIncrement.NumFractionalDigits(), + VolumePrecision: m.SizeIncrement.NumFractionalDigits(), + QuoteCurrency: toGlobalCurrency(m.QuoteCurrency), + BaseCurrency: toGlobalCurrency(m.BaseCurrency), + // FTX only limit your order by `MinProvideSize`, so I assign zero value to unsupported fields: + // MinNotional, MinAmount, MaxQuantity, MinPrice and MaxPrice. + MinNotional: fixedpoint.Zero, + MinAmount: fixedpoint.Zero, + MinQuantity: m.MinProvideSize, + MaxQuantity: fixedpoint.Zero, + StepSize: m.SizeIncrement, + MinPrice: fixedpoint.Zero, + MaxPrice: fixedpoint.Zero, + TickSize: m.PriceIncrement, + }, + Price: m.Price, + Bid: m.Bid, + Ask: m.Ask, + Last: m.Last, + } + markets[symbol] = mkt2 + } + return markets, nil +} + +func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { + + req := e.client.NewGetAccountRequest() + ftxAccount, err := req.Do(ctx) + if err != nil { + return nil, err + } + + a := &types.Account{ + TotalAccountValue: ftxAccount.TotalAccountValue, + } + + balances, err := e.QueryAccountBalances(ctx) + if err != nil { + return nil, err + } + + a.UpdateBalances(balances) + return a, nil +} + +func (e *Exchange) QueryAccountBalances(ctx context.Context) (types.BalanceMap, error) { + balanceReq := e.client.NewGetBalancesRequest() + ftxBalances, err := balanceReq.Do(ctx) + if err != nil { + return nil, err + } + + var balances = make(types.BalanceMap) + for _, r := range ftxBalances { + currency := toGlobalCurrency(r.Coin) + balances[currency] = types.Balance{ + Currency: currency, + Available: r.Free, + Locked: r.Total.Sub(r.Free), + } + } + + return balances, nil +} + +// resolution field in api +// window length in seconds. options: 15, 60, 300, 900, 3600, 14400, 86400, or any multiple of 86400 up to 30*86400 +var supportedIntervals = map[types.Interval]int{ + types.Interval1m: 1, + types.Interval5m: 5, + types.Interval15m: 15, + types.Interval1h: 60, + types.Interval4h: 60 * 4, + types.Interval1d: 60 * 24, + types.Interval3d: 60 * 24 * 3, +} + +func (e *Exchange) SupportedInterval() map[types.Interval]int { + return supportedIntervals +} + +func (e *Exchange) IsSupportedInterval(interval types.Interval) bool { + return isIntervalSupportedInKLine(interval) +} + +func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { + var klines []types.KLine + + // the fetch result is from newest to oldest + // currentEnd = until + // endTime := currentEnd.Add(interval.Duration()) + klines, err := e._queryKLines(ctx, symbol, interval, options) + if err != nil { + return nil, err + } + + klines = types.SortKLinesAscending(klines) + return klines, nil +} + +func (e *Exchange) _queryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { + if !isIntervalSupportedInKLine(interval) { + return nil, fmt.Errorf("interval %s is not supported", interval.String()) + } + + if err := marketDataLimiter.Wait(ctx); err != nil { + return nil, err + } + + // assign limit to a default value since ftx has the limit + if options.Limit == 0 { + options.Limit = 500 + } + + // if the time range exceed the ftx valid time range, we need to adjust the endTime + if options.StartTime != nil && options.EndTime != nil { + rangeDuration := options.EndTime.Sub(*options.StartTime) + estimatedCount := rangeDuration / interval.Duration() + + if options.Limit != 0 && uint64(estimatedCount) > uint64(options.Limit) { + 
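+			// Illustrative example with assumed numbers: for a 1h interval, Limit = 500 and a
+			// 30-day range (~720 estimated candles), EndTime is pulled back to StartTime + 500h
+			// so that at most Limit candles are requested.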
endTime := options.StartTime.Add(interval.Duration() * time.Duration(options.Limit)) + options.EndTime = &endTime + } + } + + resp, err := e.newRest().HistoricalPrices(ctx, toLocalSymbol(symbol), interval, int64(options.Limit), options.StartTime, options.EndTime) + if err != nil { + return nil, err + } + if !resp.Success { + return nil, fmt.Errorf("ftx returns failure") + } + + var klines []types.KLine + for _, r := range resp.Result { + globalKline, err := toGlobalKLine(symbol, interval, r) + if err != nil { + return nil, err + } + klines = append(klines, globalKline) + } + + return klines, nil +} + +func isIntervalSupportedInKLine(interval types.Interval) bool { + _, ok := supportedIntervals[interval] + return ok +} + +func (e *Exchange) QueryTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) ([]types.Trade, error) { + tradeIDs := make(map[uint64]struct{}) + lastTradeID := options.LastTradeID + + req := e.client.NewGetFillsRequest() + req.Market(toLocalSymbol(symbol)) + + if options.StartTime != nil { + req.StartTime(*options.StartTime) + } else if options.EndTime != nil { + req.EndTime(*options.EndTime) + } + + req.Order("asc") + fills, err := req.Do(ctx) + if err != nil { + return nil, err + } + + sort.Slice(fills, func(i, j int) bool { + return fills[i].Time.Before(fills[j].Time) + }) + + var trades []types.Trade + symbol = strings.ToUpper(symbol) + for _, fill := range fills { + if _, ok := tradeIDs[fill.TradeId]; ok { + continue + } + + if options.StartTime != nil && fill.Time.Before(*options.StartTime) { + continue + } + + if options.EndTime != nil && fill.Time.After(*options.EndTime) { + continue + } + + if fill.TradeId <= lastTradeID { + continue + } + + tradeIDs[fill.TradeId] = struct{}{} + lastTradeID = fill.TradeId + + t, err := toGlobalTrade(fill) + if err != nil { + return nil, err + } + trades = append(trades, t) + } + + return trades, nil +} + +func (e *Exchange) QueryDepositHistory(ctx context.Context, asset string, since, until time.Time) (allDeposits []types.Deposit, err error) { + if until == (time.Time{}) { + until = time.Now() + } + if since.After(until) { + return nil, fmt.Errorf("invalid query deposit history time range, since: %+v, until: %+v", since, until) + } + asset = TrimUpperString(asset) + + resp, err := e.newRest().DepositHistory(ctx, since, until, 0) + if err != nil { + return nil, err + } + if !resp.Success { + return nil, fmt.Errorf("ftx returns failure") + } + sort.Slice(resp.Result, func(i, j int) bool { + return resp.Result[i].Time.Before(resp.Result[j].Time.Time) + }) + for _, r := range resp.Result { + d, err := toGlobalDeposit(r) + if err != nil { + return nil, err + } + if d.Asset == asset && !since.After(d.Time.Time()) && !until.Before(d.Time.Time()) { + allDeposits = append(allDeposits, d) + } + } + return +} + +func (e *Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (types.OrderSlice, error) { + var createdOrders types.OrderSlice + // TODO: currently only support limit and market order + // TODO: support time in force + for _, so := range orders { + if err := requestLimit.Wait(ctx); err != nil { + logrus.WithError(err).Error("rate limit error") + } + + orderType, err := toLocalOrderType(so.Type) + if err != nil { + logrus.WithError(err).Error("type error") + } + + req := e.client.NewPlaceOrderRequest() + req.Market(toLocalSymbol(TrimUpperString(so.Symbol))) + req.OrderType(orderType) + req.Side(ftxapi.Side(TrimLowerString(string(so.Side)))) + req.Size(so.Quantity) + + switch so.Type { + case 
types.OrderTypeLimit, types.OrderTypeLimitMaker: + req.Price(so.Price) + + } + + if so.Type == types.OrderTypeLimitMaker { + req.PostOnly(true) + } + + if so.TimeInForce == types.TimeInForceIOC { + req.Ioc(true) + } + + req.ClientID(newSpotClientOrderID(so.ClientOrderID)) + + or, err := req.Do(ctx) + if err != nil { + return createdOrders, fmt.Errorf("failed to place order %+v: %w", so, err) + } + + globalOrder, err := toGlobalOrderNew(*or) + if err != nil { + return createdOrders, fmt.Errorf("failed to convert response to global order") + } + + createdOrders = append(createdOrders, globalOrder) + } + return createdOrders, nil +} + +func (e *Exchange) QueryOrder(ctx context.Context, q types.OrderQuery) (*types.Order, error) { + orderID, err := strconv.ParseInt(q.OrderID, 10, 64) + if err != nil { + return nil, err + } + + req := e.client.NewGetOrderStatusRequest(uint64(orderID)) + ftxOrder, err := req.Do(ctx) + if err != nil { + return nil, err + } + + order, err := toGlobalOrderNew(*ftxOrder) + return &order, err +} + +func (e *Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders []types.Order, err error) { + // TODO: invoke open trigger orders + + req := e.client.NewGetOpenOrdersRequest(toLocalSymbol(symbol)) + ftxOrders, err := req.Do(ctx) + if err != nil { + return nil, err + } + + for _, ftxOrder := range ftxOrders { + o, err := toGlobalOrderNew(ftxOrder) + if err != nil { + return orders, err + } + + orders = append(orders, o) + } + return orders, nil +} + +// symbol, since and until are all optional. FTX can only query by order created time, not updated time. +// FTX doesn't support lastOrderID, so we will query by the time range first, and filter by the lastOrderID. +func (e *Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) (orders []types.Order, err error) { + symbol = TrimUpperString(symbol) + + req := e.client.NewGetOrderHistoryRequest(toLocalSymbol(symbol)) + + if since != (time.Time{}) { + req.StartTime(since) + } else if until != (time.Time{}) { + req.EndTime(until) + } + + ftxOrders, err := req.Do(ctx) + if err != nil { + return nil, err + } + + sort.Slice(ftxOrders, func(i, j int) bool { + return ftxOrders[i].CreatedAt.Before(ftxOrders[j].CreatedAt) + }) + + for _, ftxOrder := range ftxOrders { + switch ftxOrder.Status { + case ftxapi.OrderStatusOpen, ftxapi.OrderStatusNew: + continue + } + + o, err := toGlobalOrderNew(ftxOrder) + if err != nil { + return orders, err + } + + orders = append(orders, o) + } + return orders, nil +} + +func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) error { + for _, o := range orders { + if err := requestLimit.Wait(ctx); err != nil { + logrus.WithError(err).Error("rate limit error") + } + + var resp *ftxapi.APIResponse + var err error + if len(o.ClientOrderID) > 0 { + req := e.client.NewCancelOrderByClientOrderIdRequest(o.ClientOrderID) + resp, err = req.Do(ctx) + } else { + req := e.client.NewCancelOrderRequest(strconv.FormatUint(o.OrderID, 10)) + resp, err = req.Do(ctx) + } + + if err != nil { + return err + } + + if !resp.Success { + return fmt.Errorf("cancel order failed: %s", resp.Result) + } + } + return nil +} + +func (e *Exchange) QueryTicker(ctx context.Context, symbol string) (*types.Ticker, error) { + ticketMap, err := e.QueryTickers(ctx, symbol) + if err != nil { + return nil, err + } + + if ticker, ok := ticketMap[symbol]; ok { + return &ticker, nil + } + return nil, fmt.Errorf("ticker %s not found", symbol) +} + +func (e 
*Exchange) QueryTickers(ctx context.Context, symbol ...string) (map[string]types.Ticker, error) { + + var tickers = make(map[string]types.Ticker) + + markets, err := e._queryMarkets(ctx) + if err != nil { + return nil, err + } + + m := make(map[string]struct{}) + for _, s := range symbol { + m[toGlobalSymbol(s)] = struct{}{} + } + + rest := e.newRest() + + for k, v := range markets { + + // if we provide symbol as condition then we only query the gieven symbol , + // or we should query "ALL" symbol in the market. + if _, ok := m[toGlobalSymbol(k)]; len(symbol) != 0 && !ok { + continue + } + + if err := requestLimit.Wait(ctx); err != nil { + logrus.WithError(err).Errorf("order rate limiter wait error") + } + + // ctx context.Context, market string, interval types.Interval, limit int64, start, end time.Time + now := time.Now() + since := now.Add(time.Duration(-1) * time.Hour) + until := now + prices, err := rest.HistoricalPrices(ctx, v.Market.LocalSymbol, types.Interval1h, 1, &since, &until) + if err != nil || !prices.Success || len(prices.Result) == 0 { + continue + } + + lastCandle := prices.Result[0] + tickers[toGlobalSymbol(k)] = types.Ticker{ + Time: lastCandle.StartTime.Time, + Volume: lastCandle.Volume, + Last: v.Last, + Open: lastCandle.Open, + High: lastCandle.High, + Low: lastCandle.Low, + Buy: v.Bid, + Sell: v.Ask, + } + } + + return tickers, nil +} + +func (e *Exchange) Transfer(ctx context.Context, coin string, size float64, destination string) (string, error) { + payload := TransferPayload{ + Coin: coin, + Size: size, + Source: e.subAccount, + Destination: destination, + } + resp, err := e.newRest().Transfer(ctx, payload) + if err != nil { + return "", err + } + if !resp.Success { + return "", fmt.Errorf("ftx returns transfer failure") + } + return resp.Result.String(), nil +} diff --git a/pkg/exchange/ftx/exchange_test.go b/pkg/exchange/ftx/exchange_test.go new file mode 100644 index 0000000000..eb7cc731a1 --- /dev/null +++ b/pkg/exchange/ftx/exchange_test.go @@ -0,0 +1,612 @@ +package ftx + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func integrationTestConfigured() (key, secret string, ok bool) { + var hasKey, hasSecret bool + key, hasKey = os.LookupEnv("FTX_API_KEY") + secret, hasSecret = os.LookupEnv("FTX_API_SECRET") + ok = hasKey && hasSecret && os.Getenv("TEST_FTX") == "1" + return key, secret, ok +} + +func TestExchange_IOCOrder(t *testing.T) { + key, secret, ok := integrationTestConfigured() + if !ok { + t.SkipNow() + return + } + + ex := NewExchange(key, secret, "") + createdOrder, err := ex.SubmitOrders(context.Background(), types.SubmitOrder{ + Symbol: "LTCUSDT", + Side: types.SideTypeBuy, + Type: types.OrderTypeLimitMaker, + Quantity: fixedpoint.NewFromFloat(1.0), + Price: fixedpoint.NewFromFloat(50.0), + Market: types.Market{ + Symbol: "LTCUSDT", + LocalSymbol: "LTC/USDT", + PricePrecision: 3, + VolumePrecision: 2, + QuoteCurrency: "USDT", + BaseCurrency: "LTC", + MinQuantity: fixedpoint.NewFromFloat(0.01), + StepSize: fixedpoint.NewFromFloat(0.01), + TickSize: fixedpoint.NewFromFloat(0.01), + }, + TimeInForce: "IOC", + }) + assert.NoError(t, err) + assert.NotEmpty(t, createdOrder) + t.Logf("created orders: %+v", createdOrder) +} + +func TestExchange_QueryAccountBalances(t *testing.T) { + successResp := ` +{ + "result": [ + { + 
"availableWithoutBorrow": 19.47458865, + "coin": "USD", + "free": 19.48085209, + "spotBorrow": 0.0, + "total": 1094.66405065, + "usdValue": 1094.664050651561 + } + ], + "success": true +} +` + failureResp := `{"result":[],"success":false}` + i := 0 + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if i == 0 { + fmt.Fprintln(w, successResp) + i++ + return + } + fmt.Fprintln(w, failureResp) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + + resp, err := ex.QueryAccountBalances(context.Background()) + assert.NoError(t, err) + + assert.Len(t, resp, 1) + b, ok := resp["USD"] + assert.True(t, ok) + expectedAvailable := fixedpoint.Must(fixedpoint.NewFromString("19.48085209")) + assert.Equal(t, expectedAvailable, b.Available) + assert.Equal(t, fixedpoint.Must(fixedpoint.NewFromString("1094.66405065")).Sub(expectedAvailable), b.Locked) +} + +func TestExchange_QueryOpenOrders(t *testing.T) { + successResp := ` +{ + "success": true, + "result": [ + { + "createdAt": "2019-03-05T09:56:55.728933+00:00", + "filledSize": 10, + "future": "XRP-PERP", + "id": 9596912, + "market": "XRP-PERP", + "price": 0.306525, + "avgFillPrice": 0.306526, + "remainingSize": 31421, + "side": "sell", + "size": 31431, + "status": "open", + "type": "limit", + "reduceOnly": false, + "ioc": false, + "postOnly": false, + "clientId": null + } + ] +} +` + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, successResp) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + + resp, err := ex.QueryOpenOrders(context.Background(), "XRP-PREP") + assert.NoError(t, err) + assert.Len(t, resp, 1) + assert.Equal(t, "XRP-PERP", resp[0].Symbol) +} + +func TestExchange_QueryClosedOrders(t *testing.T) { + t.Run("no closed orders", func(t *testing.T) { + successResp := `{"success": true, "result": []}` + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, successResp) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + + resp, err := ex.QueryClosedOrders(context.Background(), "BTC-PERP", time.Now(), time.Now(), 100) + assert.NoError(t, err) + + assert.Len(t, resp, 0) + }) + t.Run("one closed order", func(t *testing.T) { + successResp := ` +{ + "success": true, + "result": [ + { + "avgFillPrice": 10135.25, + "clientId": null, + "createdAt": "2019-06-27T15:24:03.101197+00:00", + "filledSize": 0.001, + "future": "BTC-PERP", + "id": 257132591, + "ioc": false, + "market": "BTC-PERP", + "postOnly": false, + "price": 10135.25, + "reduceOnly": false, + "remainingSize": 0.0, + "side": "buy", + "size": 0.001, + "status": "closed", + "type": "limit" + } + ], + "hasMoreData": false +} +` + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, successResp) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + + resp, err := ex.QueryClosedOrders(context.Background(), "BTC-PERP", time.Now(), time.Now(), 100) + assert.NoError(t, err) + assert.Len(t, resp, 1) + assert.Equal(t, 
"BTC-PERP", resp[0].Symbol) + }) + + t.Run("sort the order", func(t *testing.T) { + successResp := ` +{ + "success": true, + "result": [ + { + "status": "closed", + "createdAt": "2020-09-01T15:24:03.101197+00:00", + "id": 789 + }, + { + "status": "closed", + "createdAt": "2019-03-27T15:24:03.101197+00:00", + "id": 123 + }, + { + "status": "closed", + "createdAt": "2019-06-27T15:24:03.101197+00:00", + "id": 456 + }, + { + "status": "new", + "createdAt": "2019-06-27T15:24:03.101197+00:00", + "id": 999 + } + ], + "hasMoreData": false +} +` + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, successResp) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + + resp, err := ex.QueryClosedOrders(context.Background(), "BTC-PERP", time.Now(), time.Now(), 100) + assert.NoError(t, err) + assert.Len(t, resp, 3) + + expectedOrderID := []uint64{123, 456, 789} + for i, o := range resp { + assert.Equal(t, expectedOrderID[i], o.OrderID) + } + }) +} + +func TestExchange_QueryAccount(t *testing.T) { + balanceResp := ` +{ + "result": [ + { + "availableWithoutBorrow": 19.47458865, + "coin": "USD", + "free": 19.48085209, + "spotBorrow": 0.0, + "total": 1094.66405065, + "usdValue": 1094.664050651561 + } + ], + "success": true +} +` + + accountInfoResp := ` +{ + "success": true, + "result": { + "backstopProvider": true, + "collateral": 3568181.02691129, + "freeCollateral": 1786071.456884368, + "initialMarginRequirement": 0.12222384240257728, + "leverage": 10, + "liquidating": false, + "maintenanceMarginRequirement": 0.07177992558058484, + "makerFee": 0.0002, + "marginFraction": 0.5588433331419503, + "openMarginFraction": 0.2447194090423075, + "takerFee": 0.0005, + "totalAccountValue": 3568180.98341129, + "totalPositionSize": 6384939.6992, + "username": "user@domain.com", + "positions": [ + { + "cost": -31.7906, + "entryPrice": 138.22, + "future": "ETH-PERP", + "initialMarginRequirement": 0.1, + "longOrderSize": 1744.55, + "maintenanceMarginRequirement": 0.04, + "netSize": -0.23, + "openSize": 1744.32, + "realizedPnl": 3.39441714, + "shortOrderSize": 1732.09, + "side": "sell", + "size": 0.23, + "unrealizedPnl": 0 + } + ] + } +} +` + returnBalance := false + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if returnBalance { + fmt.Fprintln(w, balanceResp) + return + } + returnBalance = true + fmt.Fprintln(w, accountInfoResp) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + + resp, err := ex.QueryAccount(context.Background()) + assert.NoError(t, err) + + b, ok := resp.Balance("USD") + assert.True(t, ok) + expected := types.Balance{ + Currency: "USD", + Available: fixedpoint.MustNewFromString("19.48085209"), + Locked: fixedpoint.MustNewFromString("1094.66405065"), + } + expected.Locked = expected.Locked.Sub(expected.Available) + assert.Equal(t, expected, b) +} + +func TestExchange_QueryMarkets(t *testing.T) { + respJSON := `{ +"success": true, +"result": [ + { + "name": "BTC/USD", + "enabled": true, + "postOnly": false, + "priceIncrement": 1.0, + "sizeIncrement": 0.0001, + "minProvideSize": 0.001, + "last": 59039.0, + "bid": 59038.0, + "ask": 59040.0, + "price": 59039.0, + "type": "spot", + "baseCurrency": "BTC", + "quoteCurrency": "USD", + "underlying": null, + "restricted": 
false, + "highLeverageFeeExempt": true, + "change1h": 0.0015777151969599294, + "change24h": 0.05475756601279165, + "changeBod": -0.0035107262814994852, + "quoteVolume24h": 316493675.5463, + "volumeUsd24h": 316493675.5463 + } +] +}` + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, respJSON) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + ex.restEndpoint = serverURL + + resp, err := ex.QueryMarkets(context.Background()) + assert.NoError(t, err) + + assert.Len(t, resp, 1) + assert.Equal(t, types.Market{ + Symbol: "BTCUSD", + LocalSymbol: "BTC/USD", + PricePrecision: 0, + VolumePrecision: 4, + QuoteCurrency: "USD", + BaseCurrency: "BTC", + MinQuantity: fixedpoint.NewFromFloat(0.001), + StepSize: fixedpoint.NewFromFloat(0.0001), + TickSize: fixedpoint.NewFromInt(1), + }, resp["BTCUSD"]) +} + +func TestExchange_QueryDepositHistory(t *testing.T) { + respJSON := ` +{ + "success": true, + "result": [ + { + "coin": "TUSD", + "confirmations": 64, + "confirmedTime": "2019-03-05T09:56:55.728933+00:00", + "fee": 0, + "id": 1, + "sentTime": "2019-03-05T09:56:55.735929+00:00", + "size": 99.0, + "status": "confirmed", + "time": "2019-03-05T09:56:55.728933+00:00", + "txid": "0x8078356ae4b06a036d64747546c274af19581f1c78c510b60505798a7ffcaf1", + "address": {"address": "test-addr", "tag": "test-tag"} + } + ] +} +` + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, respJSON) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + ex.restEndpoint = serverURL + + ctx := context.Background() + layout := "2006-01-02T15:04:05.999999Z07:00" + actualConfirmedTime, err := time.Parse(layout, "2019-03-05T09:56:55.728933+00:00") + assert.NoError(t, err) + dh, err := ex.QueryDepositHistory(ctx, "TUSD", actualConfirmedTime.Add(-1*time.Hour), actualConfirmedTime.Add(1*time.Hour)) + assert.NoError(t, err) + assert.Len(t, dh, 1) + assert.Equal(t, types.Deposit{ + Exchange: types.ExchangeFTX, + Time: types.Time(actualConfirmedTime), + Amount: fixedpoint.NewFromInt(99), + Asset: "TUSD", + TransactionID: "0x8078356ae4b06a036d64747546c274af19581f1c78c510b60505798a7ffcaf1", + Status: types.DepositSuccess, + Address: "test-addr", + AddressTag: "test-tag", + }, dh[0]) + + // not in the time range + dh, err = ex.QueryDepositHistory(ctx, "TUSD", actualConfirmedTime.Add(1*time.Hour), actualConfirmedTime.Add(2*time.Hour)) + assert.NoError(t, err) + assert.Len(t, dh, 0) + + // exclude by asset + dh, err = ex.QueryDepositHistory(ctx, "BTC", actualConfirmedTime.Add(-1*time.Hour), actualConfirmedTime.Add(1*time.Hour)) + assert.NoError(t, err) + assert.Len(t, dh, 0) +} + +func TestExchange_QueryTrades(t *testing.T) { + t.Run("empty response", func(t *testing.T) { + respJSON := ` +{ + "success": true, + "result": [] +} +` + var f fillsResponse + assert.NoError(t, json.Unmarshal([]byte(respJSON), &f)) + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, respJSON) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + + ctx := context.Background() + actualConfirmedTime, err := 
parseDatetime("2021-02-23T09:29:08.534000+00:00") + assert.NoError(t, err) + + since := actualConfirmedTime.Add(-1 * time.Hour) + until := actualConfirmedTime.Add(1 * time.Hour) + + // ignore unavailable market + trades, err := ex.QueryTrades(ctx, "TSLA/USD", &types.TradeQueryOptions{ + StartTime: &since, + EndTime: &until, + Limit: 0, + LastTradeID: 0, + }) + assert.NoError(t, err) + assert.Len(t, trades, 0) + }) + + t.Run("duplicated response", func(t *testing.T) { + respJSON := ` +{ + "success": true, + "result": [{ + "id": 123, + "market": "TSLA/USD", + "future": null, + "baseCurrency": "TSLA", + "quoteCurrency": "USD", + "type": "order", + "side": "sell", + "price": 672.5, + "size": 1.0, + "orderId": 456, + "time": "2021-02-23T09:29:08.534000+00:00", + "tradeId": 789, + "feeRate": -5e-6, + "fee": -0.0033625, + "feeCurrency": "USD", + "liquidity": "maker" +}, { + "id": 123, + "market": "TSLA/USD", + "future": null, + "baseCurrency": "TSLA", + "quoteCurrency": "USD", + "type": "order", + "side": "sell", + "price": 672.5, + "size": 1.0, + "orderId": 456, + "time": "2021-02-23T09:29:08.534000+00:00", + "tradeId": 789, + "feeRate": -5e-6, + "fee": -0.0033625, + "feeCurrency": "USD", + "liquidity": "maker" +}] +} +` + var f fillsResponse + assert.NoError(t, json.Unmarshal([]byte(respJSON), &f)) + i := 0 + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if i == 0 { + fmt.Fprintln(w, respJSON) + return + } + fmt.Fprintln(w, `{"success":true, "result":[]}`) + })) + defer ts.Close() + + ex := NewExchange("test-key", "test-secret", "") + serverURL, err := url.Parse(ts.URL) + assert.NoError(t, err) + ex.client.BaseURL = serverURL + + ctx := context.Background() + actualConfirmedTime, err := parseDatetime("2021-02-23T09:29:08.534000+00:00") + assert.NoError(t, err) + + since := actualConfirmedTime.Add(-1 * time.Hour) + until := actualConfirmedTime.Add(1 * time.Hour) + + // ignore unavailable market + trades, err := ex.QueryTrades(ctx, "TSLA/USD", &types.TradeQueryOptions{ + StartTime: &since, + EndTime: &until, + Limit: 0, + LastTradeID: 0, + }) + assert.NoError(t, err) + assert.Len(t, trades, 1) + assert.Equal(t, types.Trade{ + ID: 789, + OrderID: 456, + Exchange: types.ExchangeFTX, + Price: fixedpoint.NewFromFloat(672.5), + Quantity: fixedpoint.One, + QuoteQuantity: fixedpoint.NewFromFloat(672.5 * 1.0), + Symbol: "TSLAUSD", + Side: types.SideTypeSell, + IsBuyer: false, + IsMaker: true, + Time: types.Time(actualConfirmedTime), + Fee: fixedpoint.NewFromFloat(-0.0033625), + FeeCurrency: "USD", + IsMargin: false, + IsIsolated: false, + StrategyID: sql.NullString{}, + PnL: sql.NullFloat64{}, + }, trades[0]) + }) +} + +func Test_isIntervalSupportedInKLine(t *testing.T) { + supportedIntervals := []types.Interval{ + types.Interval1m, + types.Interval5m, + types.Interval15m, + types.Interval1h, + types.Interval1d, + } + for _, i := range supportedIntervals { + assert.True(t, isIntervalSupportedInKLine(i)) + } + assert.False(t, isIntervalSupportedInKLine(types.Interval30m)) + assert.False(t, isIntervalSupportedInKLine(types.Interval2h)) + assert.True(t, isIntervalSupportedInKLine(types.Interval3d)) +} diff --git a/pkg/exchange/ftx/ftxapi/account.go b/pkg/exchange/ftx/ftxapi/account.go new file mode 100644 index 0000000000..f6309f272c --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/account.go @@ -0,0 +1,87 @@ +package ftxapi + +//go:generate -command GetRequest requestgen -method GET -responseType .APIResponse -responseDataField Result +//go:generate -command 
PostRequest requestgen -method POST -responseType .APIResponse -responseDataField Result +//go:generate -command DeleteRequest requestgen -method DELETE -responseType .APIResponse -responseDataField Result + +import ( + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type Position struct { + Cost fixedpoint.Value `json:"cost"` + EntryPrice fixedpoint.Value `json:"entryPrice"` + Future string `json:"future"` + InitialMarginRequirement fixedpoint.Value `json:"initialMarginRequirement"` + LongOrderSize fixedpoint.Value `json:"longOrderSize"` + MaintenanceMarginRequirement fixedpoint.Value `json:"maintenanceMarginRequirement"` + NetSize fixedpoint.Value `json:"netSize"` + OpenSize fixedpoint.Value `json:"openSize"` + ShortOrderSize fixedpoint.Value `json:"shortOrderSize"` + Side string `json:"side"` + Size fixedpoint.Value `json:"size"` + RealizedPnl fixedpoint.Value `json:"realizedPnl"` + UnrealizedPnl fixedpoint.Value `json:"unrealizedPnl"` +} + +type Account struct { + BackstopProvider bool `json:"backstopProvider"` + Collateral fixedpoint.Value `json:"collateral"` + FreeCollateral fixedpoint.Value `json:"freeCollateral"` + Leverage fixedpoint.Value `json:"leverage"` + InitialMarginRequirement fixedpoint.Value `json:"initialMarginRequirement"` + MaintenanceMarginRequirement fixedpoint.Value `json:"maintenanceMarginRequirement"` + Liquidating bool `json:"liquidating"` + MakerFee fixedpoint.Value `json:"makerFee"` + MarginFraction fixedpoint.Value `json:"marginFraction"` + OpenMarginFraction fixedpoint.Value `json:"openMarginFraction"` + TakerFee fixedpoint.Value `json:"takerFee"` + TotalAccountValue fixedpoint.Value `json:"totalAccountValue"` + TotalPositionSize fixedpoint.Value `json:"totalPositionSize"` + Username string `json:"username"` + Positions []Position `json:"positions"` +} + +//go:generate GetRequest -url "/api/account" -type GetAccountRequest -responseDataType .Account +type GetAccountRequest struct { + client requestgen.AuthenticatedAPIClient +} + +func (c *RestClient) NewGetAccountRequest() *GetAccountRequest { + return &GetAccountRequest{ + client: c, + } +} + +//go:generate GetRequest -url "/api/positions" -type GetPositionsRequest -responseDataType []Position +type GetPositionsRequest struct { + client requestgen.AuthenticatedAPIClient +} + +func (c *RestClient) NewGetPositionsRequest() *GetPositionsRequest { + return &GetPositionsRequest{ + client: c, + } +} + +type Balance struct { + Coin string `json:"coin"` + Free fixedpoint.Value `json:"free"` + SpotBorrow fixedpoint.Value `json:"spotBorrow"` + Total fixedpoint.Value `json:"total"` + UsdValue fixedpoint.Value `json:"usdValue"` + AvailableWithoutBorrow fixedpoint.Value `json:"availableWithoutBorrow"` +} + +//go:generate GetRequest -url "/api/wallet/balances" -type GetBalancesRequest -responseDataType []Balance +type GetBalancesRequest struct { + client requestgen.AuthenticatedAPIClient +} + +func (c *RestClient) NewGetBalancesRequest() *GetBalancesRequest { + return &GetBalancesRequest{ + client: c, + } +} diff --git a/pkg/exchange/ftx/ftxapi/cancel_all_order_request_requestgen.go b/pkg/exchange/ftx/ftxapi/cancel_all_order_request_requestgen.go new file mode 100644 index 0000000000..f47ea614cf --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/cancel_all_order_request_requestgen.go @@ -0,0 +1,126 @@ +// Code generated by "requestgen -method DELETE -url /api/orders -type CancelAllOrderRequest -responseType .APIResponse"; DO NOT EDIT. 
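A minimal usage sketch for the account-related builders declared in account.go above (not part of the committed diff; it assumes FTX_API_KEY / FTX_API_SECRET are set, mirroring the integration tests in this change set):

package main

import (
	"context"
	"log"
	"os"

	"github.com/c9s/bbgo/pkg/exchange/ftx/ftxapi"
)

func main() {
	client := ftxapi.NewClient()
	client.Auth(os.Getenv("FTX_API_KEY"), os.Getenv("FTX_API_SECRET"), "")

	ctx := context.Background()

	// GetAccountRequest hits /api/account and decodes into the Account struct above.
	account, err := client.NewGetAccountRequest().Do(ctx)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("collateral=%v leverage=%v", account.Collateral, account.Leverage)

	// GetBalancesRequest hits /api/wallet/balances and decodes into []Balance.
	balances, err := client.NewGetBalancesRequest().Do(ctx)
	if err != nil {
		log.Fatal(err)
	}
	for _, b := range balances {
		log.Printf("%s free=%v total=%v", b.Coin, b.Free, b.Total)
	}
}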
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (c *CancelAllOrderRequest) Market(market string) *CancelAllOrderRequest { + c.market = &market + return c +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (c *CancelAllOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (c *CancelAllOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check market field -> json key market + if c.market != nil { + market := *c.market + + // assign parameter of market + params["market"] = market + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (c *CancelAllOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := c.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (c *CancelAllOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := c.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (c *CancelAllOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (c *CancelAllOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (c *CancelAllOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := c.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (c *CancelAllOrderRequest) Do(ctx context.Context) (*APIResponse, error) { + + params, err := c.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/orders" + + req, err := c.client.NewAuthenticatedRequest(ctx, "DELETE", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := c.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/ftx/ftxapi/cancel_order_by_client_order_id_request_requestgen.go b/pkg/exchange/ftx/ftxapi/cancel_order_by_client_order_id_request_requestgen.go new file mode 100644 index 0000000000..23cb4bab39 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/cancel_order_by_client_order_id_request_requestgen.go @@ -0,0 +1,133 @@ +// Code generated by "requestgen -method DELETE -url /api/orders/by_client_id/:clientOrderId -type CancelOrderByClientOrderIdRequest -responseType .APIResponse"; DO NOT EDIT. 
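For completeness, a condensed sketch of how the generated cancel requests are driven, mirroring Exchange.CancelOrders earlier in this diff (illustrative only; the helper name cancelOne is not part of the codebase):

package example

import (
	"context"
	"fmt"
	"strconv"

	"github.com/c9s/bbgo/pkg/exchange/ftx/ftxapi"
)

// cancelOne prefers the client order ID when present, otherwise cancels by
// exchange order ID, and treats success=false in the API response as an error.
func cancelOne(ctx context.Context, client *ftxapi.RestClient, orderID uint64, clientOrderID string) error {
	var resp *ftxapi.APIResponse
	var err error
	if len(clientOrderID) > 0 {
		resp, err = client.NewCancelOrderByClientOrderIdRequest(clientOrderID).Do(ctx)
	} else {
		resp, err = client.NewCancelOrderRequest(strconv.FormatUint(orderID, 10)).Do(ctx)
	}
	if err != nil {
		return err
	}
	if !resp.Success {
		return fmt.Errorf("cancel order failed: %s", resp.Result)
	}
	return nil
}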
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (c *CancelOrderByClientOrderIdRequest) ClientOrderId(clientOrderId string) *CancelOrderByClientOrderIdRequest { + c.clientOrderId = clientOrderId + return c +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (c *CancelOrderByClientOrderIdRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (c *CancelOrderByClientOrderIdRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (c *CancelOrderByClientOrderIdRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := c.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (c *CancelOrderByClientOrderIdRequest) GetParametersJSON() ([]byte, error) { + params, err := c.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (c *CancelOrderByClientOrderIdRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check clientOrderId field -> json key clientOrderId + clientOrderId := c.clientOrderId + + // TEMPLATE check-required + if len(clientOrderId) == 0 { + return params, fmt.Errorf("clientOrderId is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of clientOrderId + params["clientOrderId"] = clientOrderId + + return params, nil +} + +func (c *CancelOrderByClientOrderIdRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (c *CancelOrderByClientOrderIdRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := c.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (c *CancelOrderByClientOrderIdRequest) Do(ctx context.Context) (*APIResponse, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/orders/by_client_id/:clientOrderId" + slugs, err := c.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = c.applySlugsToUrl(apiURL, slugs) + + req, err := c.client.NewAuthenticatedRequest(ctx, "DELETE", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := c.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/ftx/ftxapi/cancel_order_request_requestgen.go b/pkg/exchange/ftx/ftxapi/cancel_order_request_requestgen.go new file mode 100644 index 
0000000000..70684c1df8 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/cancel_order_request_requestgen.go @@ -0,0 +1,133 @@ +// Code generated by "requestgen -method DELETE -url /api/orders/:orderID -type CancelOrderRequest -responseType .APIResponse"; DO NOT EDIT. + +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (c *CancelOrderRequest) OrderID(orderID string) *CancelOrderRequest { + c.orderID = orderID + return c +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (c *CancelOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (c *CancelOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (c *CancelOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := c.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (c *CancelOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := c.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (c *CancelOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check orderID field -> json key orderID + orderID := c.orderID + + // TEMPLATE check-required + if len(orderID) == 0 { + return params, fmt.Errorf("orderID is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of orderID + params["orderID"] = orderID + + return params, nil +} + +func (c *CancelOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (c *CancelOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := c.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (c *CancelOrderRequest) Do(ctx context.Context) (*APIResponse, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/orders/:orderID" + slugs, err := c.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = c.applySlugsToUrl(apiURL, slugs) + + req, err := c.client.NewAuthenticatedRequest(ctx, "DELETE", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := c.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/ftx/ftxapi/client.go b/pkg/exchange/ftx/ftxapi/client.go new file mode 100644 index 0000000000..2437bd48f9 --- /dev/null +++ 
b/pkg/exchange/ftx/ftxapi/client.go @@ -0,0 +1,203 @@ +package ftxapi + +//go:generate -command GetRequest requestgen -method GET -responseType .APIResponse -responseDataField Result +//go:generate -command PostRequest requestgen -method POST -responseType .APIResponse -responseDataField Result +//go:generate -command DeleteRequest requestgen -method DELETE -responseType .APIResponse -responseDataField Result + +import ( + "bytes" + "context" + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "net/http" + "net/url" + "strconv" + "time" + + "github.com/c9s/requestgen" + "github.com/pkg/errors" +) + +const defaultHTTPTimeout = time.Second * 15 +const RestBaseURL = "https://ftx.com/api" + +type APIResponse struct { + Success bool `json:"success"` + Result json.RawMessage `json:"result,omitempty"` + HasMoreData bool `json:"hasMoreData,omitempty"` +} + +type RestClient struct { + BaseURL *url.URL + + client *http.Client + + Key, Secret, subAccount string + + /* + AccountService *AccountService + MarketDataService *MarketDataService + TradeService *TradeService + BulletService *BulletService + */ +} + +func NewClient() *RestClient { + u, err := url.Parse(RestBaseURL) + if err != nil { + panic(err) + } + + client := &RestClient{ + BaseURL: u, + client: &http.Client{ + Timeout: defaultHTTPTimeout, + }, + } + + /* + client.AccountService = &AccountService{client: client} + client.MarketDataService = &MarketDataService{client: client} + client.TradeService = &TradeService{client: client} + client.BulletService = &BulletService{client: client} + */ + return client +} + +func (c *RestClient) Auth(key, secret, subAccount string) { + c.Key = key + // pragma: allowlist nextline secret + c.Secret = secret + c.subAccount = subAccount +} + +// NewRequest create new API request. Relative url can be provided in refURL. +func (c *RestClient) NewRequest(ctx context.Context, method, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + rel, err := url.Parse(refURL) + if err != nil { + return nil, err + } + + if params != nil { + rel.RawQuery = params.Encode() + } + + body, err := castPayload(payload) + if err != nil { + return nil, err + } + + pathURL := c.BaseURL.ResolveReference(rel) + return http.NewRequestWithContext(ctx, method, pathURL.String(), bytes.NewReader(body)) +} + +// sendRequest sends the request to the API server and handle the response +func (c *RestClient) SendRequest(req *http.Request) (*requestgen.Response, error) { + resp, err := c.client.Do(req) + if err != nil { + return nil, err + } + + // newResponse reads the response body and return a new Response object + response, err := requestgen.NewResponse(resp) + if err != nil { + return response, err + } + + // Check error, if there is an error, return the ErrorResponse struct type + if response.IsError() { + return response, errors.New(string(response.Body)) + } + + return response, nil +} + +// newAuthenticatedRequest creates new http request for authenticated routes. 
+func (c *RestClient) NewAuthenticatedRequest(ctx context.Context, method, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + if len(c.Key) == 0 { + return nil, errors.New("empty api key") + } + + if len(c.Secret) == 0 { + return nil, errors.New("empty api secret") + } + + rel, err := url.Parse(refURL) + if err != nil { + return nil, err + } + + if params != nil { + rel.RawQuery = params.Encode() + } + + // pathURL is for sending request + pathURL := c.BaseURL.ResolveReference(rel) + + // path here is used for auth header + path := pathURL.Path + if rel.RawQuery != "" { + path += "?" + rel.RawQuery + } + + body, err := castPayload(payload) + if err != nil { + return nil, err + } + + req, err := http.NewRequestWithContext(ctx, method, pathURL.String(), bytes.NewReader(body)) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Accept", "application/json") + + // Build authentication headers + c.attachAuthHeaders(req, method, path, body) + return req, nil +} + +func (c *RestClient) attachAuthHeaders(req *http.Request, method string, path string, body []byte) { + millisecondTs := time.Now().UnixNano() / int64(time.Millisecond) + ts := strconv.FormatInt(millisecondTs, 10) + p := ts + method + path + string(body) + signature := sign(c.Secret, p) + req.Header.Set("FTX-KEY", c.Key) + req.Header.Set("FTX-SIGN", signature) + req.Header.Set("FTX-TS", ts) + if c.subAccount != "" { + req.Header.Set("FTX-SUBACCOUNT", c.subAccount) + } +} + +// sign uses sha256 to sign the payload with the given secret +func sign(secret, payload string) string { + var sig = hmac.New(sha256.New, []byte(secret)) + _, err := sig.Write([]byte(payload)) + if err != nil { + return "" + } + + return hex.EncodeToString(sig.Sum(nil)) +} + +func castPayload(payload interface{}) ([]byte, error) { + if payload != nil { + switch v := payload.(type) { + case string: + return []byte(v), nil + + case []byte: + return v, nil + + default: + body, err := json.Marshal(v) + return body, err + } + } + + return nil, nil +} diff --git a/pkg/exchange/ftx/ftxapi/client_test.go b/pkg/exchange/ftx/ftxapi/client_test.go new file mode 100644 index 0000000000..a73f595663 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/client_test.go @@ -0,0 +1,109 @@ +package ftxapi + +import ( + "context" + "os" + "regexp" + "strconv" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +func maskSecret(s string) string { + re := regexp.MustCompile(`\b(\w{4})\w+\b`) + s = re.ReplaceAllString(s, "$1******") + return s +} + +func integrationTestConfigured(t *testing.T) (key, secret string, ok bool) { + var hasKey, hasSecret bool + key, hasKey = os.LookupEnv("FTX_API_KEY") + secret, hasSecret = os.LookupEnv("FTX_API_SECRET") + ok = hasKey && hasSecret && os.Getenv("TEST_FTX") == "1" + if ok { + t.Logf("ftx api integration test enabled, key = %s, secret = %s", maskSecret(key), maskSecret(secret)) + } + return key, secret, ok +} + +func TestClient_Requests(t *testing.T) { + key, secret, ok := integrationTestConfigured(t) + if !ok { + t.SkipNow() + return + } + + ctx, cancel := context.WithTimeout(context.TODO(), 15*time.Second) + defer cancel() + + client := NewClient() + client.Auth(key, secret, "") + + testCases := []struct { + name string + tt func(t *testing.T) + }{ + { + name: "GetMarketsRequest", + tt: func(t *testing.T) { + req := client.NewGetMarketsRequest() + markets, err := req.Do(ctx) + assert.NoError(t, err) + 
assert.NotNil(t, markets) + t.Logf("markets: %+v", markets) + }, + }, + { + name: "GetAccountRequest", + tt: func(t *testing.T) { + req := client.NewGetAccountRequest() + account, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, account) + t.Logf("account: %+v", account) + }, + }, + { + name: "PlaceOrderRequest", + tt: func(t *testing.T) { + req := client.NewPlaceOrderRequest() + req.PostOnly(true). + Size(fixedpoint.MustNewFromString("1.0")). + Price(fixedpoint.MustNewFromString("10.0")). + OrderType(OrderTypeLimit). + Side(SideBuy). + Market("LTC/USDT") + + createdOrder, err := req.Do(ctx) + if assert.NoError(t, err) { + assert.NotNil(t, createdOrder) + t.Logf("createdOrder: %+v", createdOrder) + + req2 := client.NewCancelOrderRequest(strconv.FormatInt(createdOrder.Id, 10)) + ret, err := req2.Do(ctx) + assert.NoError(t, err) + t.Logf("cancelOrder: %+v", ret) + assert.True(t, ret.Success) + } + }, + }, + { + name: "GetFillsRequest", + tt: func(t *testing.T) { + req := client.NewGetFillsRequest() + req.Market("CRO/USD") + fills, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, fills) + t.Logf("fills: %+v", fills) + }, + }, + } + for _, testCase := range testCases { + t.Run(testCase.name, testCase.tt) + } +} diff --git a/pkg/exchange/ftx/ftxapi/coin.go b/pkg/exchange/ftx/ftxapi/coin.go new file mode 100644 index 0000000000..ca8d81a55c --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/coin.go @@ -0,0 +1,42 @@ +package ftxapi + +//go:generate -command GetRequest requestgen -method GET -responseType .APIResponse -responseDataField Result +//go:generate -command PostRequest requestgen -method POST -responseType .APIResponse -responseDataField Result +//go:generate -command DeleteRequest requestgen -method DELETE -responseType .APIResponse -responseDataField Result + +import ( + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type Coin struct { + Bep2Asset *string `json:"bep2Asset"` + CanConvert bool `json:"canConvert"` + CanDeposit bool `json:"canDeposit"` + CanWithdraw bool `json:"canWithdraw"` + Collateral bool `json:"collateral"` + CollateralWeight fixedpoint.Value `json:"collateralWeight"` + CreditTo *string `json:"creditTo"` + Erc20Contract string `json:"erc20Contract"` + Fiat bool `json:"fiat"` + HasTag bool `json:"hasTag"` + Id string `json:"id"` + IsToken bool `json:"isToken"` + Methods []string `json:"methods"` + Name string `json:"name"` + SplMint string `json:"splMint"` + Trc20Contract string `json:"trc20Contract"` + UsdFungible bool `json:"usdFungible"` +} + +//go:generate GetRequest -url "api/coins" -type GetCoinsRequest -responseDataType []Coin +type GetCoinsRequest struct { + client requestgen.AuthenticatedAPIClient +} + +func (c *RestClient) NewGetCoinsRequest() *GetCoinsRequest { + return &GetCoinsRequest{ + client: c, + } +} diff --git a/pkg/exchange/ftx/ftxapi/get_account_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_account_request_requestgen.go new file mode 100644 index 0000000000..ef153bc72a --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_account_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url /api/account -type GetAccountRequest -responseDataType .Account"; DO NOT EDIT. 
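The authentication implemented in client.go above is worth restating in isolation: FTX-SIGN is a hex-encoded HMAC-SHA256 over millisecond timestamp + HTTP method + request path (including the query string, if any) + raw body. A small self-contained sketch with placeholder credentials:

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strconv"
	"time"
)

// ftxSign reproduces the signing rule in RestClient.attachAuthHeaders:
// HMAC-SHA256(secret, ts + method + path + body), hex encoded.
func ftxSign(secret, ts, method, path, body string) string {
	mac := hmac.New(sha256.New, []byte(secret))
	mac.Write([]byte(ts + method + path + body))
	return hex.EncodeToString(mac.Sum(nil))
}

func main() {
	// Placeholder credentials; real requests need a valid key/secret pair.
	key, secret := "YOUR-API-KEY", "YOUR-API-SECRET"

	ts := strconv.FormatInt(time.Now().UnixNano()/int64(time.Millisecond), 10)
	sig := ftxSign(secret, ts, "GET", "/api/account", "")

	// These are the headers NewAuthenticatedRequest attaches to each request.
	fmt.Println("FTX-KEY: ", key)
	fmt.Println("FTX-TS:  ", ts)
	fmt.Println("FTX-SIGN:", sig)
}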
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetAccountRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetAccountRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetAccountRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetAccountRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetAccountRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetAccountRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetAccountRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetAccountRequest) Do(ctx context.Context) (*Account, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/account" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data Account + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_balances_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_balances_request_requestgen.go new file mode 100644 index 0000000000..e67a36299d --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_balances_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url /api/wallet/balances -type GetBalancesRequest -responseDataType []Balance"; DO NOT EDIT. 
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetBalancesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetBalancesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetBalancesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetBalancesRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetBalancesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetBalancesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetBalancesRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetBalancesRequest) Do(ctx context.Context) ([]Balance, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/wallet/balances" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Balance + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_coins_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_coins_request_requestgen.go new file mode 100644 index 0000000000..3e5547c795 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_coins_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url api/coins -type GetCoinsRequest -responseDataType []Coin"; DO NOT EDIT. 
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetCoinsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetCoinsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetCoinsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetCoinsRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetCoinsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetCoinsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetCoinsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetCoinsRequest) Do(ctx context.Context) ([]Coin, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "api/coins" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Coin + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_fills_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_fills_request_requestgen.go new file mode 100644 index 0000000000..714817710e --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_fills_request_requestgen.go @@ -0,0 +1,187 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url /api/fills -type GetFillsRequest -responseDataType []Fill"; DO NOT EDIT. 
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" + "strconv" + "time" +) + +func (g *GetFillsRequest) Market(market string) *GetFillsRequest { + g.market = &market + return g +} + +func (g *GetFillsRequest) StartTime(startTime time.Time) *GetFillsRequest { + g.startTime = &startTime + return g +} + +func (g *GetFillsRequest) EndTime(endTime time.Time) *GetFillsRequest { + g.endTime = &endTime + return g +} + +func (g *GetFillsRequest) OrderID(orderID int) *GetFillsRequest { + g.orderID = &orderID + return g +} + +func (g *GetFillsRequest) Order(order string) *GetFillsRequest { + g.order = &order + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetFillsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check market field -> json key market + if g.market != nil { + market := *g.market + + // assign parameter of market + params["market"] = market + } else { + } + // check startTime field -> json key start_time + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to seconds time stamp + params["start_time"] = strconv.FormatInt(startTime.Unix(), 10) + } else { + } + // check endTime field -> json key end_time + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to seconds time stamp + params["end_time"] = strconv.FormatInt(endTime.Unix(), 10) + } else { + } + // check orderID field -> json key orderId + if g.orderID != nil { + orderID := *g.orderID + + // assign parameter of orderID + params["orderId"] = orderID + } else { + } + // check order field -> json key order + if g.order != nil { + order := *g.order + + // assign parameter of order + params["order"] = order + } else { + } + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetFillsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetFillsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetFillsRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetFillsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetFillsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetFillsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + 
slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetFillsRequest) Do(ctx context.Context) ([]Fill, error) { + + // no body params + var params interface{} + query, err := g.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/api/fills" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Fill + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_market_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_market_request_requestgen.go new file mode 100644 index 0000000000..72825a4c29 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_market_request_requestgen.go @@ -0,0 +1,155 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url api/markets/:market -type GetMarketRequest -responseDataType .Market"; DO NOT EDIT. + +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (g *GetMarketRequest) Market(market string) *GetMarketRequest { + g.market = market + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarketRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarketRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarketRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarketRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarketRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check market field -> json key market + market := g.market + + // assign parameter of market + params["market"] = market + + return params, nil +} + +func (g *GetMarketRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarketRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := 
sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarketRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarketRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarketRequest) Do(ctx context.Context) (*Market, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "api/markets/:market" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data Market + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_markets_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_markets_request_requestgen.go new file mode 100644 index 0000000000..db8e591bc8 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_markets_request_requestgen.go @@ -0,0 +1,139 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url api/markets -type GetMarketsRequest -responseDataType []Market"; DO NOT EDIT. + +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarketsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarketsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarketsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarketsRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarketsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarketsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := 
regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarketsRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarketsRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarketsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarketsRequest) Do(ctx context.Context) ([]Market, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "api/markets" + + req, err := g.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Market + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_open_orders_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_open_orders_request_requestgen.go new file mode 100644 index 0000000000..b36d0fede8 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_open_orders_request_requestgen.go @@ -0,0 +1,128 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url /api/orders -type GetOpenOrdersRequest -responseDataType []Order"; DO NOT EDIT. 
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (g *GetOpenOrdersRequest) Market(market string) *GetOpenOrdersRequest { + g.market = market + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetOpenOrdersRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check market field -> json key market + market := g.market + + // assign parameter of market + params["market"] = market + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetOpenOrdersRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetOpenOrdersRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetOpenOrdersRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetOpenOrdersRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetOpenOrdersRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetOpenOrdersRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetOpenOrdersRequest) Do(ctx context.Context) ([]Order, error) { + + // no body params + var params interface{} + query, err := g.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/api/orders" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Order + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_order_history_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_order_history_request_requestgen.go new file mode 100644 index 0000000000..e10a4da17b --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_order_history_request_requestgen.go @@ -0,0 +1,158 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url /api/orders/history -type GetOrderHistoryRequest -responseDataType []Order"; DO NOT EDIT. 
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" + "strconv" + "time" +) + +func (g *GetOrderHistoryRequest) Market(market string) *GetOrderHistoryRequest { + g.market = market + return g +} + +func (g *GetOrderHistoryRequest) StartTime(startTime time.Time) *GetOrderHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetOrderHistoryRequest) EndTime(endTime time.Time) *GetOrderHistoryRequest { + g.endTime = &endTime + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetOrderHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check market field -> json key market + market := g.market + + // assign parameter of market + params["market"] = market + // check startTime field -> json key start_time + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to seconds time stamp + params["start_time"] = strconv.FormatInt(startTime.Unix(), 10) + } else { + } + // check endTime field -> json key end_time + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to seconds time stamp + params["end_time"] = strconv.FormatInt(endTime.Unix(), 10) + } else { + } + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetOrderHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetOrderHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetOrderHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetOrderHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetOrderHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetOrderHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetOrderHistoryRequest) Do(ctx context.Context) ([]Order, error) { + + // no body params + var params interface{} + query, err := g.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/api/orders/history" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + 
return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Order + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_order_status_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_order_status_request_requestgen.go new file mode 100644 index 0000000000..6b613611a7 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_order_status_request_requestgen.go @@ -0,0 +1,131 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url /api/orders/:orderId -type GetOrderStatusRequest -responseDataType .Order"; DO NOT EDIT. + +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (g *GetOrderStatusRequest) OrderID(orderID uint64) *GetOrderStatusRequest { + g.orderID = orderID + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetOrderStatusRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetOrderStatusRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetOrderStatusRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetOrderStatusRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetOrderStatusRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check orderID field -> json key orderId + orderID := g.orderID + + // assign parameter of orderID + params["orderId"] = orderID + + return params, nil +} + +func (g *GetOrderStatusRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetOrderStatusRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetOrderStatusRequest) Do(ctx context.Context) (*Order, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/orders/:orderId" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + 
if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data Order + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/get_positions_request_requestgen.go b/pkg/exchange/ftx/ftxapi/get_positions_request_requestgen.go new file mode 100644 index 0000000000..d77811f6fe --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/get_positions_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Result -url /api/positions -type GetPositionsRequest -responseDataType []Position"; DO NOT EDIT. + +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetPositionsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetPositionsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetPositionsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetPositionsRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetPositionsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetPositionsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetPositionsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetPositionsRequest) Do(ctx context.Context) ([]Position, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/positions" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Position + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/market.go b/pkg/exchange/ftx/ftxapi/market.go new file mode 
100644 index 0000000000..4cac2fee9e --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/market.go @@ -0,0 +1,59 @@ +package ftxapi + +//go:generate -command GetRequest requestgen -method GET -responseType .APIResponse -responseDataField Result +//go:generate -command PostRequest requestgen -method POST -responseType .APIResponse -responseDataField Result +//go:generate -command DeleteRequest requestgen -method DELETE -responseType .APIResponse -responseDataField Result + +import ( + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type Market struct { + Name string `json:"name"` + BaseCurrency string `json:"baseCurrency"` + QuoteCurrency string `json:"quoteCurrency"` + QuoteVolume24H fixedpoint.Value `json:"quoteVolume24h"` + Change1H fixedpoint.Value `json:"change1h"` + Change24H fixedpoint.Value `json:"change24h"` + ChangeBod fixedpoint.Value `json:"changeBod"` + VolumeUsd24H fixedpoint.Value `json:"volumeUsd24h"` + HighLeverageFeeExempt bool `json:"highLeverageFeeExempt"` + MinProvideSize fixedpoint.Value `json:"minProvideSize"` + Type string `json:"type"` + Underlying string `json:"underlying"` + Enabled bool `json:"enabled"` + Ask fixedpoint.Value `json:"ask"` + Bid fixedpoint.Value `json:"bid"` + Last fixedpoint.Value `json:"last"` + PostOnly bool `json:"postOnly"` + Price fixedpoint.Value `json:"price"` + PriceIncrement fixedpoint.Value `json:"priceIncrement"` + SizeIncrement fixedpoint.Value `json:"sizeIncrement"` + Restricted bool `json:"restricted"` +} + +//go:generate GetRequest -url "api/markets" -type GetMarketsRequest -responseDataType []Market +type GetMarketsRequest struct { + client requestgen.APIClient +} + +func (c *RestClient) NewGetMarketsRequest() *GetMarketsRequest { + return &GetMarketsRequest{ + client: c, + } +} + +//go:generate GetRequest -url "api/markets/:market" -type GetMarketRequest -responseDataType .Market +type GetMarketRequest struct { + client requestgen.AuthenticatedAPIClient + market string `param:"market,slug"` +} + +func (c *RestClient) NewGetMarketRequest(market string) *GetMarketRequest { + return &GetMarketRequest{ + client: c, + market: market, + } +} diff --git a/pkg/exchange/ftx/ftxapi/place_order_request_requestgen.go b/pkg/exchange/ftx/ftxapi/place_order_request_requestgen.go new file mode 100644 index 0000000000..994011ca96 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/place_order_request_requestgen.go @@ -0,0 +1,219 @@ +// Code generated by "requestgen -method POST -responseType .APIResponse -responseDataField Result -url /api/orders -type PlaceOrderRequest -responseDataType .Order"; DO NOT EDIT. 
+ +package ftxapi + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/fixedpoint" + "net/url" + "regexp" +) + +func (p *PlaceOrderRequest) Market(market string) *PlaceOrderRequest { + p.market = market + return p +} + +func (p *PlaceOrderRequest) Side(side Side) *PlaceOrderRequest { + p.side = side + return p +} + +func (p *PlaceOrderRequest) Price(price fixedpoint.Value) *PlaceOrderRequest { + p.price = price + return p +} + +func (p *PlaceOrderRequest) Size(size fixedpoint.Value) *PlaceOrderRequest { + p.size = size + return p +} + +func (p *PlaceOrderRequest) OrderType(orderType OrderType) *PlaceOrderRequest { + p.orderType = orderType + return p +} + +func (p *PlaceOrderRequest) Ioc(ioc bool) *PlaceOrderRequest { + p.ioc = &ioc + return p +} + +func (p *PlaceOrderRequest) PostOnly(postOnly bool) *PlaceOrderRequest { + p.postOnly = &postOnly + return p +} + +func (p *PlaceOrderRequest) ClientID(clientID string) *PlaceOrderRequest { + p.clientID = &clientID + return p +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (p *PlaceOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (p *PlaceOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check market field -> json key market + market := p.market + + // TEMPLATE check-required + if len(market) == 0 { + return params, fmt.Errorf("market is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of market + params["market"] = market + // check side field -> json key side + side := p.side + + // TEMPLATE check-required + if len(side) == 0 { + return params, fmt.Errorf("side is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of side + params["side"] = side + // check price field -> json key price + price := p.price + + // assign parameter of price + params["price"] = price + // check size field -> json key size + size := p.size + + // assign parameter of size + params["size"] = size + // check orderType field -> json key type + orderType := p.orderType + + // assign parameter of orderType + params["type"] = orderType + // check ioc field -> json key ioc + if p.ioc != nil { + ioc := *p.ioc + + // assign parameter of ioc + params["ioc"] = ioc + } else { + } + // check postOnly field -> json key postOnly + if p.postOnly != nil { + postOnly := *p.postOnly + + // assign parameter of postOnly + params["postOnly"] = postOnly + } else { + } + // check clientID field -> json key clientId + if p.clientID != nil { + clientID := *p.clientID + + // assign parameter of clientID + params["clientId"] = clientID + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (p *PlaceOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := p.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (p *PlaceOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := p.GetParameters() 
+ if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (p *PlaceOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (p *PlaceOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (p *PlaceOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := p.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (p *PlaceOrderRequest) Do(ctx context.Context) (*Order, error) { + + params, err := p.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/orders" + + req, err := p.client.NewAuthenticatedRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := p.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data Order + if err := json.Unmarshal(apiResponse.Result, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/ftx/ftxapi/trade.go b/pkg/exchange/ftx/ftxapi/trade.go new file mode 100644 index 0000000000..323481d20d --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/trade.go @@ -0,0 +1,172 @@ +package ftxapi + +//go:generate -command GetRequest requestgen -method GET -responseType .APIResponse -responseDataField Result +//go:generate -command PostRequest requestgen -method POST -responseType .APIResponse -responseDataField Result +//go:generate -command DeleteRequest requestgen -method DELETE -responseType .APIResponse -responseDataField Result + +import ( + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type Order struct { + CreatedAt time.Time `json:"createdAt"` + Future string `json:"future"` + Id int64 `json:"id"` + Market string `json:"market"` + Price fixedpoint.Value `json:"price"` + AvgFillPrice fixedpoint.Value `json:"avgFillPrice"` + Size fixedpoint.Value `json:"size"` + RemainingSize fixedpoint.Value `json:"remainingSize"` + FilledSize fixedpoint.Value `json:"filledSize"` + Side Side `json:"side"` + Status OrderStatus `json:"status"` + Type OrderType `json:"type"` + ReduceOnly bool `json:"reduceOnly"` + Ioc bool `json:"ioc"` + PostOnly bool `json:"postOnly"` + ClientId string `json:"clientId"` +} + +//go:generate GetRequest -url "/api/orders" -type GetOpenOrdersRequest -responseDataType []Order +type GetOpenOrdersRequest struct { + client requestgen.AuthenticatedAPIClient + market string `param:"market,query"` +} + +func (c *RestClient) NewGetOpenOrdersRequest(market string) *GetOpenOrdersRequest { + return &GetOpenOrdersRequest{ + client: c, + market: market, + } +} + +//go:generate GetRequest -url "/api/orders/history" -type GetOrderHistoryRequest -responseDataType []Order +type GetOrderHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + market string `param:"market,query"` + + startTime *time.Time `param:"start_time,seconds,query"` + endTime *time.Time `param:"end_time,seconds,query"` +} + +func (c *RestClient) 
NewGetOrderHistoryRequest(market string) *GetOrderHistoryRequest { + return &GetOrderHistoryRequest{ + client: c, + market: market, + } +} + +//go:generate PostRequest -url "/api/orders" -type PlaceOrderRequest -responseDataType .Order +type PlaceOrderRequest struct { + client requestgen.AuthenticatedAPIClient + + market string `param:"market,required"` + side Side `param:"side,required"` + price fixedpoint.Value `param:"price"` + size fixedpoint.Value `param:"size"` + orderType OrderType `param:"type"` + ioc *bool `param:"ioc"` + postOnly *bool `param:"postOnly"` + clientID *string `param:"clientId,optional"` +} + +func (c *RestClient) NewPlaceOrderRequest() *PlaceOrderRequest { + return &PlaceOrderRequest{ + client: c, + } +} + +//go:generate requestgen -method DELETE -url "/api/orders/:orderID" -type CancelOrderRequest -responseType .APIResponse +type CancelOrderRequest struct { + client requestgen.AuthenticatedAPIClient + orderID string `param:"orderID,required,slug"` +} + +func (c *RestClient) NewCancelOrderRequest(orderID string) *CancelOrderRequest { + return &CancelOrderRequest{ + client: c, + orderID: orderID, + } +} + +//go:generate requestgen -method DELETE -url "/api/orders" -type CancelAllOrderRequest -responseType .APIResponse +type CancelAllOrderRequest struct { + client requestgen.AuthenticatedAPIClient + market *string `param:"market"` +} + +func (c *RestClient) NewCancelAllOrderRequest() *CancelAllOrderRequest { + return &CancelAllOrderRequest{ + client: c, + } +} + +//go:generate requestgen -method DELETE -url "/api/orders/by_client_id/:clientOrderId" -type CancelOrderByClientOrderIdRequest -responseType .APIResponse +type CancelOrderByClientOrderIdRequest struct { + client requestgen.AuthenticatedAPIClient + clientOrderId string `param:"clientOrderId,required,slug"` +} + +func (c *RestClient) NewCancelOrderByClientOrderIdRequest(clientOrderId string) *CancelOrderByClientOrderIdRequest { + return &CancelOrderByClientOrderIdRequest{ + client: c, + clientOrderId: clientOrderId, + } +} + +type Fill struct { + // Id is fill ID + Id uint64 `json:"id"` + Future string `json:"future"` + Liquidity Liquidity `json:"liquidity"` + Market string `json:"market"` + BaseCurrency string `json:"baseCurrency"` + QuoteCurrency string `json:"quoteCurrency"` + OrderId uint64 `json:"orderId"` + TradeId uint64 `json:"tradeId"` + Price fixedpoint.Value `json:"price"` + Side Side `json:"side"` + Size fixedpoint.Value `json:"size"` + Time time.Time `json:"time"` + Type string `json:"type"` // always = "order" + Fee fixedpoint.Value `json:"fee"` + FeeCurrency string `json:"feeCurrency"` + FeeRate fixedpoint.Value `json:"feeRate"` +} + +//go:generate GetRequest -url "/api/fills" -type GetFillsRequest -responseDataType []Fill +type GetFillsRequest struct { + client requestgen.AuthenticatedAPIClient + + market *string `param:"market,query"` + startTime *time.Time `param:"start_time,seconds,query"` + endTime *time.Time `param:"end_time,seconds,query"` + orderID *int `param:"orderId,query"` + + // order is the order of the returned records, asc or null + order *string `param:"order,query"` +} + +func (c *RestClient) NewGetFillsRequest() *GetFillsRequest { + return &GetFillsRequest{ + client: c, + } +} + +//go:generate GetRequest -url "/api/orders/:orderId" -type GetOrderStatusRequest -responseDataType .Order +type GetOrderStatusRequest struct { + client requestgen.AuthenticatedAPIClient + orderID uint64 `param:"orderId,slug"` +} + +func (c *RestClient) NewGetOrderStatusRequest(orderID uint64) 
*GetOrderStatusRequest { + return &GetOrderStatusRequest{ + client: c, + orderID: orderID, + } +} diff --git a/pkg/exchange/ftx/ftxapi/types.go b/pkg/exchange/ftx/ftxapi/types.go new file mode 100644 index 0000000000..fdfe6cd784 --- /dev/null +++ b/pkg/exchange/ftx/ftxapi/types.go @@ -0,0 +1,35 @@ +package ftxapi + +type Liquidity string + +const ( + LiquidityTaker Liquidity = "taker" + LiquidityMaker Liquidity = "maker" +) + +type Side string + +const ( + SideBuy Side = "buy" + SideSell Side = "sell" +) + +type OrderType string + +const ( + OrderTypeLimit OrderType = "limit" + OrderTypeMarket OrderType = "market" + + // trigger order types + OrderTypeStopLimit OrderType = "stop" + OrderTypeTrailingStop OrderType = "trailingStop" + OrderTypeTakeProfit OrderType = "takeProfit" +) + +type OrderStatus string + +const ( + OrderStatusNew OrderStatus = "new" + OrderStatusOpen OrderStatus = "open" + OrderStatusClosed OrderStatus = "closed" +) diff --git a/pkg/exchange/ftx/generate_symbol_map.go b/pkg/exchange/ftx/generate_symbol_map.go new file mode 100644 index 0000000000..b2c68072ea --- /dev/null +++ b/pkg/exchange/ftx/generate_symbol_map.go @@ -0,0 +1,65 @@ +//go:build ignore +// +build ignore + +package main + +import ( + "encoding/json" + "log" + "net/http" + "os" + "strings" + "text/template" +) + +var packageTemplate = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT. +package ftx +var symbolMap = map[string]string{ +{{- range $k, $v := . }} + {{ printf "%q" $k }}: {{ printf "%q" $v }}, +{{- end }} +} +`)) + +type Market struct { + Name string `json:"name"` +} + +type ApiResponse struct { + Success bool `json:"success"` + + Result []Market `json:"result"` +} + +func main() { + var data = map[string]string{} + + const url = "https://ftx.com/api/markets" + + resp, err := http.Get(url) + if err != nil { + log.Fatal(err) + return + } + defer resp.Body.Close() + + r := &ApiResponse{} + json.NewDecoder(resp.Body).Decode(r) + + for _, m := range r.Result { + key := strings.ReplaceAll(strings.ToUpper(strings.TrimSpace(m.Name)), "/", "") + data[key] = m.Name + } + + f, err := os.Create("symbols.go") + if err != nil { + log.Fatal(err) + } + + defer f.Close() + + err = packageTemplate.Execute(f, data) + if err != nil { + log.Fatal(err) + } +} diff --git a/pkg/exchange/ftx/orderbook_snapshot.json b/pkg/exchange/ftx/orderbook_snapshot.json new file mode 100644 index 0000000000..ca912fa493 --- /dev/null +++ b/pkg/exchange/ftx/orderbook_snapshot.json @@ -0,0 +1,814 @@ +{ + "channel": "orderbook", + "market": "BTC/USDT", + "type": "partial", + "data": { + "time": 1614520368.9313016, + "checksum": 2150525410, + "bids": [ + [ + 44555.0, + 3.3968 + ], + [ + 44554.0, + 0.0561 + ], + [ + 44548.0, + 0.1683 + ], + [ + 44542.0, + 0.1762 + ], + [ + 44540.0, + 0.0433 + ], + [ + 44539.0, + 4.1616 + ], + [ + 44534.0, + 0.0234 + ], + [ + 44533.0, + 33.1201 + ], + [ + 44532.0, + 8.2272 + ], + [ + 44531.0, + 0.3364 + ], + [ + 44530.0, + 0.0011 + ], + [ + 44527.0, + 0.0074 + ], + [ + 44526.0, + 0.0117 + ], + [ + 44525.0, + 0.4514 + ], + [ + 44520.0, + 0.001 + ], + [ + 44518.0, + 0.1054 + ], + [ + 44517.0, + 0.0077 + ], + [ + 44512.0, + 0.8512 + ], + [ + 44511.0, + 31.8569 + ], + [ + 44510.0, + 0.001 + ], + [ + 44507.0, + 0.0234 + ], + [ + 44506.0, + 0.382 + ], + [ + 44505.0, + 0.0468 + ], + [ + 44501.0, + 0.0082 + ], + [ + 44500.0, + 0.501 + ], + [ + 44498.0, + 0.001 + ], + [ + 44496.0, + 0.0269 + ], + [ + 44490.0, + 0.001 + ], + [ + 44480.0, + 0.001 + ], + [ + 44479.0, + 0.0306 + ], + 
[ + 44478.0, + 0.01 + ], + [ + 44477.0, + 0.302 + ], + [ + 44470.0, + 0.001 + ], + [ + 44469.0, + 0.0001 + ], + [ + 44460.0, + 0.001 + ], + [ + 44454.0, + 0.001 + ], + [ + 44450.0, + 0.0019 + ], + [ + 44448.0, + 0.0005 + ], + [ + 44440.0, + 0.001 + ], + [ + 44439.0, + 28.9321 + ], + [ + 44430.0, + 0.001 + ], + [ + 44420.0, + 0.001 + ], + [ + 44416.0, + 0.0001 + ], + [ + 44411.0, + 0.0984 + ], + [ + 44410.0, + 0.001 + ], + [ + 44409.0, + 0.001 + ], + [ + 44408.0, + 0.0004 + ], + [ + 44407.0, + 0.0002 + ], + [ + 44400.0, + 0.001 + ], + [ + 44397.0, + 0.0002 + ], + [ + 44391.0, + 0.0004 + ], + [ + 44390.0, + 0.001 + ], + [ + 44389.0, + 43.3904 + ], + [ + 44380.0, + 0.001 + ], + [ + 44376.0, + 0.0001 + ], + [ + 44375.0, + 0.0001 + ], + [ + 44372.0, + 0.0002 + ], + [ + 44370.0, + 0.0012 + ], + [ + 44365.0, + 0.001 + ], + [ + 44363.0, + 0.0004 + ], + [ + 44360.0, + 0.001 + ], + [ + 44354.0, + 54.0385 + ], + [ + 44350.0, + 0.0028 + ], + [ + 44346.0, + 0.0001 + ], + [ + 44340.0, + 0.0013 + ], + [ + 44338.0, + 0.0002 + ], + [ + 44336.0, + 39.6518 + ], + [ + 44333.0, + 0.0001 + ], + [ + 44330.0, + 0.001 + ], + [ + 44329.0, + 0.5014 + ], + [ + 44326.0, + 0.0002 + ], + [ + 44322.0, + 0.001 + ], + [ + 44321.0, + 0.001 + ], + [ + 44320.0, + 0.001 + ], + [ + 44314.0, + 0.0007 + ], + [ + 44310.0, + 0.001 + ], + [ + 44306.0, + 0.0001 + ], + [ + 44300.0, + 33.2836 + ], + [ + 44292.0, + 0.0035 + ], + [ + 44291.0, + 0.0004 + ], + [ + 44290.0, + 0.001 + ], + [ + 44287.0, + 39.717 + ], + [ + 44285.0, + 0.0439 + ], + [ + 44281.0, + 1.0294 + ], + [ + 44280.0, + 0.001 + ], + [ + 44277.0, + 0.001 + ], + [ + 44275.0, + 0.0165 + ], + [ + 44270.0, + 0.001 + ], + [ + 44268.0, + 48.31 + ], + [ + 44260.0, + 0.0011 + ], + [ + 44254.0, + 0.0003 + ], + [ + 44250.0, + 0.0031 + ], + [ + 44246.0, + 0.0002 + ], + [ + 44244.0, + 0.0001 + ], + [ + 44241.0, + 0.0009 + ], + [ + 44240.0, + 0.001 + ], + [ + 44233.0, + 0.001 + ], + [ + 44230.0, + 0.001 + ], + [ + 44224.0, + 0.0001 + ], + [ + 44222.0, + 0.0002 + ] + ], + "asks": [ + [ + 44574.0, + 0.4591 + ], + [ + 44579.0, + 0.15 + ], + [ + 44582.0, + 2.9122 + ], + [ + 44583.0, + 0.1683 + ], + [ + 44584.0, + 0.5 + ], + [ + 44588.0, + 0.0433 + ], + [ + 44590.0, + 8.6379 + ], + [ + 44593.0, + 0.405 + ], + [ + 44595.0, + 0.5988 + ], + [ + 44596.0, + 0.06 + ], + [ + 44605.0, + 0.6927 + ], + [ + 44606.0, + 0.3365 + ], + [ + 44616.0, + 0.1752 + ], + [ + 44617.0, + 0.0215 + ], + [ + 44620.0, + 0.008 + ], + [ + 44629.0, + 0.0078 + ], + [ + 44630.0, + 0.101 + ], + [ + 44631.0, + 0.246 + ], + [ + 44632.0, + 0.01 + ], + [ + 44635.0, + 0.2997 + ], + [ + 44636.0, + 26.777 + ], + [ + 44639.0, + 0.662 + ], + [ + 44642.0, + 0.0078 + ], + [ + 44650.0, + 0.0009 + ], + [ + 44651.0, + 0.0001 + ], + [ + 44652.0, + 0.0079 + ], + [ + 44653.0, + 0.0003 + ], + [ + 44654.0, + 0.354 + ], + [ + 44661.0, + 0.0306 + ], + [ + 44666.0, + 0.0002 + ], + [ + 44667.0, + 0.0009 + ], + [ + 44668.0, + 0.0234 + ], + [ + 44672.0, + 25.923 + ], + [ + 44673.0, + 0.1 + ], + [ + 44674.0, + 0.001 + ], + [ + 44675.0, + 0.0467 + ], + [ + 44678.0, + 0.1286 + ], + [ + 44680.0, + 0.0467 + ], + [ + 44684.0, + 0.0117 + ], + [ + 44687.0, + 0.0351 + ], + [ + 44689.0, + 0.1052 + ], + [ + 44693.0, + 0.0132 + ], + [ + 44699.0, + 0.0984 + ], + [ + 44700.0, + 0.671 + ], + [ + 44709.0, + 0.0007 + ], + [ + 44713.0, + 45.9031 + ], + [ + 44714.0, + 0.0001 + ], + [ + 44719.0, + 0.001 + ], + [ + 44727.0, + 0.0004 + ], + [ + 44728.0, + 0.0002 + ], + [ + 44735.0, + 0.0003 + ], + [ + 44744.0, + 64.7511 + ], + [ + 44750.0, + 0.0018 + ], + [ + 44763.0, + 
0.001 + ], + [ + 44775.0, + 0.0006 + ], + [ + 44781.0, + 0.0001 + ], + [ + 44782.0, + 34.2206 + ], + [ + 44784.0, + 0.0001 + ], + [ + 44790.0, + 0.0002 + ], + [ + 44796.0, + 0.001 + ], + [ + 44799.0, + 0.0002 + ], + [ + 44800.0, + 0.0011 + ], + [ + 44806.0, + 0.0165 + ], + [ + 44807.0, + 0.001 + ], + [ + 44813.0, + 0.0001 + ], + [ + 44814.0, + 0.0003 + ], + [ + 44816.0, + 0.0002 + ], + [ + 44820.0, + 38.3495 + ], + [ + 44822.0, + 0.0026 + ], + [ + 44836.0, + 0.0001 + ], + [ + 44846.0, + 50.1127 + ], + [ + 44850.0, + 0.0018 + ], + [ + 44851.0, + 0.001 + ], + [ + 44859.0, + 0.0003 + ], + [ + 44867.0, + 66.5987 + ], + [ + 44876.0, + 1.0294 + ], + [ + 44885.0, + 0.0005 + ], + [ + 44888.0, + 0.0002 + ], + [ + 44889.0, + 0.0003 + ], + [ + 44895.0, + 0.001 + ], + [ + 44897.0, + 0.0443 + ], + [ + 44900.0, + 40.9965 + ], + [ + 44909.0, + 0.0008 + ], + [ + 44913.0, + 0.0001 + ], + [ + 44926.0, + 45.4838 + ], + [ + 44928.0, + 70.5138 + ], + [ + 44938.0, + 0.0005 + ], + [ + 44939.0, + 0.001 + ], + [ + 44949.0, + 0.0004 + ], + [ + 44950.0, + 0.0019 + ], + [ + 44959.0, + 0.0002 + ], + [ + 44962.0, + 0.0002 + ], + [ + 44979.0, + 0.0002 + ], + [ + 44982.0, + 68.1033 + ], + [ + 44983.0, + 0.001 + ], + [ + 44999.0, + 0.0003 + ], + [ + 45000.0, + 0.0273 + ], + [ + 45002.0, + 0.0002 + ], + [ + 45009.0, + 0.0003 + ], + [ + 45010.0, + 0.0003 + ] + ], + "action": "partial" + } +} diff --git a/pkg/exchange/ftx/orderbook_update.json b/pkg/exchange/ftx/orderbook_update.json new file mode 100644 index 0000000000..51931ed803 --- /dev/null +++ b/pkg/exchange/ftx/orderbook_update.json @@ -0,0 +1,26 @@ +{ + "channel": "orderbook", + "market": "BTC/USDT", + "type": "update", + "data": { + "time": 1614737706.650016, + "checksum": 3976343467, + "bids": [ + [ + 48763.0, + 0.5001 + ] + ], + "asks": [ + [ + 48826.0, + 0.3385 + ], + [ + 48929.0, + 26.8713 + ] + ], + "action": "update" + } +} \ No newline at end of file diff --git a/pkg/exchange/ftx/rest.go b/pkg/exchange/ftx/rest.go new file mode 100644 index 0000000000..18282551ca --- /dev/null +++ b/pkg/exchange/ftx/rest.go @@ -0,0 +1,269 @@ +package ftx + +import ( + "bytes" + "context" + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strconv" + "time" + + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/util" +) + +type transferRequest struct { + *restRequest +} + +type TransferPayload struct { + Coin string + Size float64 + Source string + Destination string +} + +func (r *restRequest) Transfer(ctx context.Context, p TransferPayload) (transferResponse, error) { + resp, err := r. + Method("POST"). + ReferenceURL("api/subaccounts/transfer"). + Payloads(map[string]interface{}{ + "coin": p.Coin, + "size": p.Size, + "source": p.Source, + "destination": p.Destination, + }). 
+ DoAuthenticatedRequest(ctx) + if err != nil { + return transferResponse{}, err + } + + var t transferResponse + if err := json.Unmarshal(resp.Body, &t); err != nil { + return transferResponse{}, fmt.Errorf("failed to unmarshal transfer response body to json: %w", err) + } + + return t, nil +} + +type restRequest struct { + *walletRequest + *marketRequest + *transferRequest + + key, secret string + // Optional sub-account name + sub string + + c *http.Client + baseURL *url.URL + refURL string + // http method, e.g., GET or POST + m string + + // query string + q map[string]string + + // payload + p map[string]interface{} + + // object id + id string +} + +func newRestRequest(c *http.Client, baseURL *url.URL) *restRequest { + r := &restRequest{ + c: c, + baseURL: baseURL, + q: make(map[string]string), + p: make(map[string]interface{}), + } + + r.marketRequest = &marketRequest{restRequest: r} + r.walletRequest = &walletRequest{restRequest: r} + return r +} + +func (r *restRequest) Auth(key, secret string) *restRequest { + r.key = key + // pragma: allowlist nextline secret + r.secret = secret + return r +} + +func (r *restRequest) SubAccount(subAccount string) *restRequest { + r.sub = subAccount + return r +} + +func (r *restRequest) Method(method string) *restRequest { + r.m = method + return r +} + +func (r *restRequest) ReferenceURL(refURL string) *restRequest { + r.refURL = refURL + return r +} + +func (r *restRequest) buildURL() (*url.URL, error) { + u := r.refURL + if len(r.id) > 0 { + u = u + "/" + r.id + } + refURL, err := url.Parse(u) + if err != nil { + return nil, err + } + + return r.baseURL.ResolveReference(refURL), nil +} + +func (r *restRequest) ID(id string) *restRequest { + r.id = id + return r +} + +func (r *restRequest) Payloads(payloads map[string]interface{}) *restRequest { + for k, v := range payloads { + r.p[k] = v + } + return r +} + +func (r *restRequest) Query(query map[string]string) *restRequest { + for k, v := range query { + r.q[k] = v + } + return r +} + +func (r *restRequest) DoAuthenticatedRequest(ctx context.Context) (*util.Response, error) { + req, err := r.newAuthenticatedRequest(ctx) + if err != nil { + return nil, err + } + + return r.sendRequest(req) +} + +func (r *restRequest) newAuthenticatedRequest(ctx context.Context) (*http.Request, error) { + u, err := r.buildURL() + if err != nil { + return nil, err + } + + var jsonPayload []byte + if len(r.p) > 0 { + var err2 error + jsonPayload, err2 = json.Marshal(r.p) + if err2 != nil { + return nil, fmt.Errorf("can't marshal payload map to json: %w", err2) + } + } + + req, err := http.NewRequestWithContext(ctx, r.m, u.String(), bytes.NewBuffer(jsonPayload)) + if err != nil { + return nil, err + } + + ts := strconv.FormatInt(timestamp(), 10) + p := fmt.Sprintf("%s%s%s", ts, r.m, u.Path) + if len(r.q) > 0 { + rq := u.Query() + for k, v := range r.q { + rq.Add(k, v) + } + req.URL.RawQuery = rq.Encode() + p += "?" 
+ req.URL.RawQuery + } + if len(jsonPayload) > 0 { + p += string(jsonPayload) + } + signature := sign(r.secret, p) + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("FTX-KEY", r.key) + req.Header.Set("FTX-SIGN", signature) + req.Header.Set("FTX-TS", ts) + if r.sub != "" { + req.Header.Set("FTX-SUBACCOUNT", r.sub) + } + + return req, nil +} + +func sign(secret, body string) string { + mac := hmac.New(sha256.New, []byte(secret)) + mac.Write([]byte(body)) + return hex.EncodeToString(mac.Sum(nil)) +} + +func timestamp() int64 { + return time.Now().UnixNano() / int64(time.Millisecond) +} + +func (r *restRequest) sendRequest(req *http.Request) (*util.Response, error) { + resp, err := r.c.Do(req) + if err != nil { + return nil, err + } + + // newResponse reads the response body and return a new Response object + response, err := util.NewResponse(resp) + if err != nil { + return response, err + } + + // Check error, if there is an error, return the ErrorResponse struct type + if response.IsError() { + errorResponse, err := toErrorResponse(response) + if err != nil { + return response, err + } + return response, errorResponse + } + + return response, nil +} + +type ErrorResponse struct { + *util.Response + + IsSuccess bool `json:"success"` + ErrorString string `json:"error,omitempty"` +} + +func (r *ErrorResponse) Error() string { + return fmt.Sprintf("%s %s %d, success: %t, err: %s", + r.Response.Request.Method, + r.Response.Request.URL.String(), + r.Response.StatusCode, + r.IsSuccess, + r.ErrorString, + ) +} + +func toErrorResponse(response *util.Response) (*ErrorResponse, error) { + errorResponse := &ErrorResponse{Response: response} + + if response.IsJSON() { + var err = response.DecodeJSON(errorResponse) + if err != nil { + return nil, errors.Wrapf(err, "failed to decode json for response: %d %s", response.StatusCode, string(response.Body)) + } + + if errorResponse.IsSuccess { + return nil, fmt.Errorf("response.Success should be false") + } + return errorResponse, nil + } + + return errorResponse, fmt.Errorf("unexpected response content type %s", response.Header.Get("content-type")) +} diff --git a/pkg/exchange/ftx/rest_market_request.go b/pkg/exchange/ftx/rest_market_request.go new file mode 100644 index 0000000000..aeb41e17a5 --- /dev/null +++ b/pkg/exchange/ftx/rest_market_request.go @@ -0,0 +1,53 @@ +package ftx + +import ( + "context" + "encoding/json" + "fmt" + "strconv" + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +type marketRequest struct { + *restRequest +} + +/* +supported resolutions: window length in seconds. options: 15, 60, 300, 900, 3600, 14400, 86400 +doc: https://docs.ftx.com/?javascript#get-historical-prices +*/ +func (r *marketRequest) HistoricalPrices(ctx context.Context, market string, interval types.Interval, limit int64, start, end *time.Time) (HistoricalPricesResponse, error) { + q := map[string]string{ + "resolution": strconv.FormatInt(int64(interval.Minutes())*60, 10), + } + + if limit > 0 { + q["limit"] = strconv.FormatInt(limit, 10) + } + + if start != nil { + q["start_time"] = strconv.FormatInt(start.Unix(), 10) + } + + if end != nil { + q["end_time"] = strconv.FormatInt(end.Unix(), 10) + } + + resp, err := r. + Method("GET"). + Query(q). + ReferenceURL(fmt.Sprintf("api/markets/%s/candles", market)). 
+ DoAuthenticatedRequest(ctx) + + if err != nil { + return HistoricalPricesResponse{}, err + } + + var h HistoricalPricesResponse + if err := json.Unmarshal(resp.Body, &h); err != nil { + return HistoricalPricesResponse{}, fmt.Errorf("failed to unmarshal historical prices response body to json: %w", err) + } + return h, nil +} diff --git a/pkg/exchange/ftx/rest_responses.go b/pkg/exchange/ftx/rest_responses.go new file mode 100644 index 0000000000..15da5e606d --- /dev/null +++ b/pkg/exchange/ftx/rest_responses.go @@ -0,0 +1,391 @@ +package ftx + +import ( + "fmt" + "strings" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +// ex: 2019-03-05T09:56:55.728933+00:00 +const timeLayout = "2006-01-02T15:04:05.999999Z07:00" + +type datetime struct { + time.Time +} + +func parseDatetime(s string) (time.Time, error) { + return time.Parse(timeLayout, s) +} + +// used in unit test +func mustParseDatetime(s string) time.Time { + t, err := parseDatetime(s) + if err != nil { + panic(err) + } + return t +} + +func (d *datetime) UnmarshalJSON(b []byte) error { + // remove double quote from json string + s := strings.Trim(string(b), "\"") + if len(s) == 0 { + d.Time = time.Time{} + return nil + } + t, err := parseDatetime(s) + if err != nil { + return err + } + d.Time = t + return nil +} + +/* +{ + "success": true, + "result": { + "backstopProvider": true, + "collateral": 3568181.02691129, + "freeCollateral": 1786071.456884368, + "initialMarginRequirement": 0.12222384240257728, + "leverage": 10, + "liquidating": false, + "maintenanceMarginRequirement": 0.07177992558058484, + "makerFee": 0.0002, + "marginFraction": 0.5588433331419503, + "openMarginFraction": 0.2447194090423075, + "takerFee": 0.0005, + "totalAccountValue": 3568180.98341129, + "totalPositionSize": 6384939.6992, + "username": "user@domain.com", + "positions": [ + { + "cost": -31.7906, + "entryPrice": 138.22, + "future": "ETH-PERP", + "initialMarginRequirement": 0.1, + "longOrderSize": 1744.55, + "maintenanceMarginRequirement": 0.04, + "netSize": -0.23, + "openSize": 1744.32, + "realizedPnl": 3.39441714, + "shortOrderSize": 1732.09, + "side": "sell", + "size": 0.23, + "unrealizedPnl": 0 + } + ] + } +} +*/ +type accountResponse struct { // nolint:golint,deadcode + Success bool `json:"success"` + Result account `json:"result"` +} + +type account struct { + MakerFee fixedpoint.Value `json:"makerFee"` + TakerFee fixedpoint.Value `json:"takerFee"` + TotalAccountValue fixedpoint.Value `json:"totalAccountValue"` +} + +type positionsResponse struct { // nolint:golint,deadcode + Success bool `json:"success"` + Result []position `json:"result"` +} + +/* +{ + "cost": -31.7906, + "entryPrice": 138.22, + "estimatedLiquidationPrice": 152.1, + "future": "ETH-PERP", + "initialMarginRequirement": 0.1, + "longOrderSize": 1744.55, + "maintenanceMarginRequirement": 0.04, + "netSize": -0.23, + "openSize": 1744.32, + "realizedPnl": 3.39441714, + "shortOrderSize": 1732.09, + "side": "sell", + "size": 0.23, + "unrealizedPnl": 0, + "collateralUsed": 3.17906 +} +*/ +type position struct { + Cost fixedpoint.Value `json:"cost"` + EntryPrice fixedpoint.Value `json:"entryPrice"` + EstimatedLiquidationPrice fixedpoint.Value `json:"estimatedLiquidationPrice"` + Future string `json:"future"` + InitialMarginRequirement fixedpoint.Value `json:"initialMarginRequirement"` + LongOrderSize fixedpoint.Value `json:"longOrderSize"` + MaintenanceMarginRequirement fixedpoint.Value `json:"maintenanceMarginRequirement"` + NetSize fixedpoint.Value 
`json:"netSize"` + OpenSize fixedpoint.Value `json:"openSize"` + RealizedPnl fixedpoint.Value `json:"realizedPnl"` + ShortOrderSize fixedpoint.Value `json:"shortOrderSize"` + Side string `json:"Side"` + Size fixedpoint.Value `json:"size"` + UnrealizedPnl fixedpoint.Value `json:"unrealizedPnl"` + CollateralUsed fixedpoint.Value `json:"collateralUsed"` +} + +type balances struct { // nolint:golint,deadcode + Success bool `json:"success"` + + Result []struct { + Coin string `json:"coin"` + Free fixedpoint.Value `json:"free"` + Total fixedpoint.Value `json:"total"` + } `json:"result"` +} + +/* +[ + { + "name": "BTC/USD", + "enabled": true, + "postOnly": false, + "priceIncrement": 1.0, + "sizeIncrement": 0.0001, + "minProvideSize": 0.0001, + "last": 59039.0, + "bid": 59038.0, + "ask": 59040.0, + "price": 59039.0, + "type": "spot", + "baseCurrency": "BTC", + "quoteCurrency": "USD", + "underlying": null, + "restricted": false, + "highLeverageFeeExempt": true, + "change1h": 0.0015777151969599294, + "change24h": 0.05475756601279165, + "changeBod": -0.0035107262814994852, + "quoteVolume24h": 316493675.5463, + "volumeUsd24h": 316493675.5463 + } +] +*/ +type marketsResponse struct { // nolint:golint,deadcode + Success bool `json:"success"` + Result []market `json:"result"` +} + +type market struct { + Name string `json:"name"` + Enabled bool `json:"enabled"` + PostOnly bool `json:"postOnly"` + PriceIncrement fixedpoint.Value `json:"priceIncrement"` + SizeIncrement fixedpoint.Value `json:"sizeIncrement"` + MinProvideSize fixedpoint.Value `json:"minProvideSize"` + Last fixedpoint.Value `json:"last"` + Bid fixedpoint.Value `json:"bid"` + Ask fixedpoint.Value `json:"ask"` + Price fixedpoint.Value `json:"price"` + Type string `json:"type"` + BaseCurrency string `json:"baseCurrency"` + QuoteCurrency string `json:"quoteCurrency"` + Underlying string `json:"underlying"` + Restricted bool `json:"restricted"` + HighLeverageFeeExempt bool `json:"highLeverageFeeExempt"` + Change1h fixedpoint.Value `json:"change1h"` + Change24h fixedpoint.Value `json:"change24h"` + ChangeBod fixedpoint.Value `json:"changeBod"` + QuoteVolume24h fixedpoint.Value `json:"quoteVolume24h"` + VolumeUsd24h fixedpoint.Value `json:"volumeUsd24h"` +} + +/* +{ + "success": true, + "result": [ + { + "close": 11055.25, + "high": 11089.0, + "low": 11043.5, + "open": 11059.25, + "startTime": "2019-06-24T17:15:00+00:00", + "volume": 464193.95725 + } + ] +} +*/ +type HistoricalPricesResponse struct { + Success bool `json:"success"` + Result []Candle `json:"result"` +} + +type Candle struct { + Close fixedpoint.Value `json:"close"` + High fixedpoint.Value `json:"high"` + Low fixedpoint.Value `json:"low"` + Open fixedpoint.Value `json:"open"` + StartTime datetime `json:"startTime"` + Volume fixedpoint.Value `json:"volume"` +} + +type ordersHistoryResponse struct { // nolint:golint,deadcode + Success bool `json:"success"` + Result []order `json:"result"` + HasMoreData bool `json:"hasMoreData"` +} + +type ordersResponse struct { // nolint:golint,deadcode + Success bool `json:"success"` + + Result []order `json:"result"` +} + +type cancelOrderResponse struct { // nolint:golint,deadcode + Success bool `json:"success"` + Result string `json:"result"` +} + +type order struct { + CreatedAt datetime `json:"createdAt"` + FilledSize fixedpoint.Value `json:"filledSize"` + // Future field is not defined in the response format table but in the response example. 
+ Future string `json:"future"` + ID int64 `json:"id"` + Market string `json:"market"` + Price fixedpoint.Value `json:"price"` + AvgFillPrice fixedpoint.Value `json:"avgFillPrice"` + RemainingSize fixedpoint.Value `json:"remainingSize"` + Side string `json:"side"` + Size fixedpoint.Value `json:"size"` + Status string `json:"status"` + Type string `json:"type"` + ReduceOnly bool `json:"reduceOnly"` + Ioc bool `json:"ioc"` + PostOnly bool `json:"postOnly"` + ClientId string `json:"clientId"` + Liquidation bool `json:"liquidation"` +} + +type orderResponse struct { + Success bool `json:"success"` + + Result order `json:"result"` +} + +/* +{ + "success": true, + "result": [ + { + "coin": "TUSD", + "confirmations": 64, + "confirmedTime": "2019-03-05T09:56:55.728933+00:00", + "fee": 0, + "id": 1, + "sentTime": "2019-03-05T09:56:55.735929+00:00", + "size": 99.0, + "status": "confirmed", + "time": "2019-03-05T09:56:55.728933+00:00", + "txid": "0x8078356ae4b06a036d64747546c274af19581f1c78c510b60505798a7ffcaf1" + } + ] +} +*/ +type depositHistoryResponse struct { + Success bool `json:"success"` + Result []depositHistory `json:"result"` +} + +type depositHistory struct { + ID int64 `json:"id"` + Coin string `json:"coin"` + TxID string `json:"txid"` + Address address `json:"address"` + Confirmations int64 `json:"confirmations"` + ConfirmedTime datetime `json:"confirmedTime"` + Fee fixedpoint.Value `json:"fee"` + SentTime datetime `json:"sentTime"` + Size fixedpoint.Value `json:"size"` + Status string `json:"status"` + Time datetime `json:"time"` + Notes string `json:"notes"` +} + +/** +{ + "address": "test123", + "tag": null, + "method": "ltc", + "coin": null +} +*/ +type address struct { + Address string `json:"address"` + Tag string `json:"tag"` + Method string `json:"method"` + Coin string `json:"coin"` +} + +type fillsResponse struct { + Success bool `json:"success"` + Result []fill `json:"result"` +} + +/* +{ + "id": 123, + "market": "TSLA/USD", + "future": null, + "baseCurrency": "TSLA", + "quoteCurrency": "USD", + "type": "order", + "side": "sell", + "price": 672.5, + "size": 1.0, + "orderId": 456, + "time": "2021-02-23T09:29:08.534000+00:00", + "tradeId": 789, + "feeRate": -5e-6, + "fee": -0.0033625, + "feeCurrency": "USD", + "liquidity": "maker" +} +*/ +type fill struct { + ID int64 `json:"id"` + Market string `json:"market"` + Future string `json:"future"` + BaseCurrency string `json:"baseCurrency"` + QuoteCurrency string `json:"quoteCurrency"` + Type string `json:"type"` + Side types.SideType `json:"side"` + Price fixedpoint.Value `json:"price"` + Size fixedpoint.Value `json:"size"` + OrderId uint64 `json:"orderId"` + Time datetime `json:"time"` + TradeId uint64 `json:"tradeId"` + FeeRate fixedpoint.Value `json:"feeRate"` + Fee fixedpoint.Value `json:"fee"` + FeeCurrency string `json:"feeCurrency"` + Liquidity string `json:"liquidity"` +} + +type transferResponse struct { + Success bool `json:"success"` + Result transfer `json:"result"` +} + +type transfer struct { + Id uint `json:"id"` + Coin string `json:"coin"` + Size fixedpoint.Value `json:"size"` + Time string `json:"time"` + Notes string `json:"notes"` + Status string `json:"status"` +} + +func (t *transfer) String() string { + return fmt.Sprintf("%+v", *t) +} diff --git a/pkg/exchange/ftx/rest_test.go b/pkg/exchange/ftx/rest_test.go new file mode 100644 index 0000000000..ca1adaf3ab --- /dev/null +++ b/pkg/exchange/ftx/rest_test.go @@ -0,0 +1,34 @@ +package ftx + +import ( + "bytes" + "io/ioutil" + "net/http" + "testing" + + 
"github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/util" +) + +func Test_toErrorResponse(t *testing.T) { + r, err := util.NewResponse(&http.Response{ + Header: http.Header{}, + StatusCode: 200, + Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"Success": true}`))), + }) + assert.NoError(t, err) + + _, err = toErrorResponse(r) + assert.EqualError(t, err, "unexpected response content type ") + r.Header.Set("content-type", "text/json") + + _, err = toErrorResponse(r) + assert.EqualError(t, err, "response.Success should be false") + + r.Body = []byte(`{"error":"Not logged in","Success":false}`) + errResp, err := toErrorResponse(r) + assert.NoError(t, err) + assert.False(t, errResp.IsSuccess) + assert.Equal(t, "Not logged in", errResp.ErrorString) +} diff --git a/pkg/exchange/ftx/rest_wallet_request.go b/pkg/exchange/ftx/rest_wallet_request.go new file mode 100644 index 0000000000..039a325530 --- /dev/null +++ b/pkg/exchange/ftx/rest_wallet_request.go @@ -0,0 +1,44 @@ +package ftx + +import ( + "context" + "encoding/json" + "fmt" + "strconv" + "time" +) + +type walletRequest struct { + *restRequest +} + +func (r *walletRequest) DepositHistory(ctx context.Context, since time.Time, until time.Time, limit int) (depositHistoryResponse, error) { + q := make(map[string]string) + if limit > 0 { + q["limit"] = strconv.Itoa(limit) + } + + if since != (time.Time{}) { + q["start_time"] = strconv.FormatInt(since.Unix(), 10) + } + if until != (time.Time{}) { + q["end_time"] = strconv.FormatInt(until.Unix(), 10) + } + + resp, err := r. + Method("GET"). + ReferenceURL("api/wallet/deposits"). + Query(q). + DoAuthenticatedRequest(ctx) + + if err != nil { + return depositHistoryResponse{}, err + } + + var d depositHistoryResponse + if err := json.Unmarshal(resp.Body, &d); err != nil { + return depositHistoryResponse{}, fmt.Errorf("failed to unmarshal deposit history response body to json: %w", err) + } + + return d, nil +} diff --git a/pkg/exchange/ftx/stream.go b/pkg/exchange/ftx/stream.go new file mode 100644 index 0000000000..6a70a249db --- /dev/null +++ b/pkg/exchange/ftx/stream.go @@ -0,0 +1,259 @@ +package ftx + +import ( + "context" + "fmt" + "time" + + "github.com/gorilla/websocket" + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/net/websocketbase" + "github.com/c9s/bbgo/pkg/types" +) + +const endpoint = "wss://ftx.com/ws/" + +type Stream struct { + *types.StandardStream + + ws *websocketbase.WebsocketClientBase + exchange *Exchange + + key string + secret string + subAccount string + + // subscriptions are only accessed in single goroutine environment, so I don't use mutex to protect them + subscriptions []websocketRequest + klineSubscriptions []klineSubscription +} + +type klineSubscription struct { + symbol string + interval types.Interval +} + +func NewStream(key, secret string, subAccount string, e *Exchange) *Stream { + s := &Stream{ + exchange: e, + key: key, + // pragma: allowlist nextline secret + secret: secret, + subAccount: subAccount, + StandardStream: &types.StandardStream{}, + ws: websocketbase.NewWebsocketClientBase(endpoint, 3*time.Second), + } + + s.ws.OnMessage((&messageHandler{StandardStream: s.StandardStream}).handleMessage) + s.ws.OnConnected(func(conn *websocket.Conn) { + subs := []websocketRequest{newLoginRequest(s.key, s.secret, time.Now(), s.subAccount)} + subs = append(subs, s.subscriptions...) 
+ for _, sub := range subs { + if err := conn.WriteJSON(sub); err != nil { + s.ws.EmitError(fmt.Errorf("failed to send subscription: %+v", sub)) + } + } + + s.EmitConnect() + }) + + return s +} + +func (s *Stream) Connect(ctx context.Context) error { + // If it's not public only, let's do the authentication. + if !s.PublicOnly { + s.subscribePrivateEvents() + } + + if err := s.ws.Connect(ctx); err != nil { + return err + } + s.EmitStart() + + go s.pollKLines(ctx) + go s.pollBalances(ctx) + + go func() { + // https://docs.ftx.com/?javascript#request-process + tk := time.NewTicker(15 * time.Second) + defer tk.Stop() + for { + select { + case <-ctx.Done(): + if err := ctx.Err(); err != nil && !errors.Is(err, context.Canceled) { + logger.WithError(err).Errorf("context returned error") + } + + case <-tk.C: + if err := s.ws.Conn().WriteJSON(websocketRequest{ + Operation: ping, + }); err != nil { + logger.WithError(err).Warnf("failed to ping, try in next tick") + } + } + } + }() + return nil +} + +func (s *Stream) subscribePrivateEvents() { + s.addSubscription(websocketRequest{ + Operation: subscribe, + Channel: privateOrdersChannel, + }) + s.addSubscription(websocketRequest{ + Operation: subscribe, + Channel: privateTradesChannel, + }) +} + +func (s *Stream) addSubscription(request websocketRequest) { + s.subscriptions = append(s.subscriptions, request) +} + +func (s *Stream) Subscribe(channel types.Channel, symbol string, option types.SubscribeOptions) { + switch channel { + case types.BookChannel: + s.addSubscription(websocketRequest{ + Operation: subscribe, + Channel: orderBookChannel, + Market: toLocalSymbol(TrimUpperString(symbol)), + }) + return + case types.BookTickerChannel: + s.addSubscription(websocketRequest{ + Operation: subscribe, + Channel: bookTickerChannel, + Market: toLocalSymbol(TrimUpperString(symbol)), + }) + return + case types.KLineChannel: + // FTX does not support kline channel, do polling + interval := types.Interval(option.Interval) + ks := klineSubscription{symbol: symbol, interval: interval} + s.klineSubscriptions = append(s.klineSubscriptions, ks) + return + case types.MarketTradeChannel: + s.addSubscription(websocketRequest{ + Operation: subscribe, + Channel: marketTradeChannel, + Market: toLocalSymbol(TrimUpperString(symbol)), + }) + return + default: + panic("only support book/kline/trade channel now") + } +} + +func (s *Stream) pollBalances(ctx context.Context) { + ticker := time.NewTicker(15 * time.Second) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return + + case <-ticker.C: + balances, err := s.exchange.QueryAccountBalances(ctx) + if err != nil { + log.WithError(err).Errorf("query balance error") + continue + } + s.EmitBalanceSnapshot(balances) + } + } +} + +func (s *Stream) pollKLines(ctx context.Context) { + lastClosed := make(map[string]map[types.Interval]time.Time, 0) + // get current kline candle + for _, sub := range s.klineSubscriptions { + klines := getLast2KLine(s.exchange, ctx, sub.symbol, sub.interval) + lastClosed[sub.symbol] = make(map[types.Interval]time.Time, 0) + if len(klines) > 0 { + // handle mutiple klines, get the latest one + if lastClosed[sub.symbol][sub.interval].Unix() < klines[0].StartTime.Unix() { + s.EmitKLine(klines[0]) + s.EmitKLineClosed(klines[0]) + lastClosed[sub.symbol][sub.interval] = klines[0].StartTime.Time() + } + + if len(klines) > 1 { + s.EmitKLine(klines[1]) + } + } + } + + // the highest resolution of kline is 1min + ticker := time.NewTicker(time.Second * 30) + defer ticker.Stop() + + for { + 
select { + case <-ctx.Done(): + if err := ctx.Err(); err != nil && !errors.Is(err, context.Canceled) { + logger.WithError(err).Errorf("context returned error") + } + return + case <-ticker.C: + now := time.Now().Truncate(time.Minute) + for _, sub := range s.klineSubscriptions { + subTime := now.Truncate(sub.interval.Duration()) + if now != subTime { + // not in the checking time slot, check next subscription + continue + } + klines := getLast2KLine(s.exchange, ctx, sub.symbol, sub.interval) + + if len(klines) > 0 { + // handle mutiple klines, get the latest one + if lastClosed[sub.symbol][sub.interval].Unix() < klines[0].StartTime.Unix() { + s.EmitKLine(klines[0]) + s.EmitKLineClosed(klines[0]) + lastClosed[sub.symbol][sub.interval] = klines[0].StartTime.Time() + } + + if len(klines) > 1 { + s.EmitKLine(klines[1]) + } + } + } + } + } +} + +func getLast2KLine(e *Exchange, ctx context.Context, symbol string, interval types.Interval) []types.KLine { + // set since to more 30s ago to avoid getting no kline candle + since := time.Now().Add(time.Duration(interval.Minutes()*-3) * time.Minute) + klines, err := e.QueryKLines(ctx, symbol, interval, types.KLineQueryOptions{ + StartTime: &since, + Limit: 2, + }) + if err != nil { + logger.WithError(err).Errorf("failed to get kline data") + return klines + } + + return klines +} + +func getLastClosedKLine(e *Exchange, ctx context.Context, symbol string, interval types.Interval) []types.KLine { + // set since to more 30s ago to avoid getting no kline candle + klines := getLast2KLine(e, ctx, symbol, interval) + if len(klines) == 0 { + return []types.KLine{} + } + return []types.KLine{klines[0]} +} + +func (s *Stream) Close() error { + s.subscriptions = nil + if s.ws != nil { + return s.ws.Conn().Close() + } + return nil +} diff --git a/pkg/exchange/ftx/stream_message_handler.go b/pkg/exchange/ftx/stream_message_handler.go new file mode 100644 index 0000000000..98744622c1 --- /dev/null +++ b/pkg/exchange/ftx/stream_message_handler.go @@ -0,0 +1,169 @@ +package ftx + +import ( + "encoding/json" + + "github.com/c9s/bbgo/pkg/types" +) + +type messageHandler struct { + *types.StandardStream +} + +func (h *messageHandler) handleMessage(message []byte) { + var r websocketResponse + if err := json.Unmarshal(message, &r); err != nil { + logger.WithError(err).Errorf("failed to unmarshal resp: %s", string(message)) + return + } + + if r.Type == errRespType { + logger.Errorf("receives err: %+v", r) + return + } + + if r.Type == pongRespType { + return + } + + switch r.Channel { + case orderBookChannel: + h.handleOrderBook(r) + case bookTickerChannel: + h.handleBookTicker(r) + case marketTradeChannel: + h.handleMarketTrade(r) + case privateOrdersChannel: + h.handlePrivateOrders(r) + case privateTradesChannel: + h.handleTrades(r) + default: + logger.Warnf("unsupported message type: %+v", r.Type) + } +} + +// {"type": "subscribed", "channel": "orderbook", "market": "BTC/USDT"} +func (h messageHandler) handleSubscribedMessage(response websocketResponse) { + r, err := response.toSubscribedResponse() + if err != nil { + logger.WithError(err).Errorf("failed to convert the subscribed message") + return + } + logger.Info(r) +} + +func (h *messageHandler) handleOrderBook(response websocketResponse) { + if response.Type == subscribedRespType { + h.handleSubscribedMessage(response) + return + } + r, err := response.toPublicOrderBookResponse() + if err != nil { + logger.WithError(err).Errorf("failed to convert the public orderbook") + return + } + + globalOrderBook, err := 
toGlobalOrderBook(r) + if err != nil { + logger.WithError(err).Errorf("failed to generate orderbook snapshot") + return + } + + switch r.Type { + case partialRespType: + if err := r.verifyChecksum(); err != nil { + logger.WithError(err).Errorf("invalid orderbook snapshot") + return + } + h.EmitBookSnapshot(globalOrderBook) + case updateRespType: + // emit updates, not the whole orderbook + h.EmitBookUpdate(globalOrderBook) + default: + logger.Errorf("unsupported order book data type %s", r.Type) + return + } +} + +func (h *messageHandler) handleMarketTrade(response websocketResponse) { + if response.Type == subscribedRespType { + h.handleSubscribedMessage(response) + return + } + trades, err := response.toMarketTradeResponse() + if err != nil { + logger.WithError(err).Errorf("failed to generate market trade %v", response) + return + } + for _, trade := range trades { + h.EmitMarketTrade(trade) + } +} + +func (h *messageHandler) handleBookTicker(response websocketResponse) { + if response.Type == subscribedRespType { + h.handleSubscribedMessage(response) + return + } + + r, err := response.toBookTickerResponse() + if err != nil { + logger.WithError(err).Errorf("failed to convert the book ticker") + return + } + + globalBookTicker, err := toGlobalBookTicker(r) + if err != nil { + logger.WithError(err).Errorf("failed to generate book ticker") + return + } + + switch r.Type { + case updateRespType: + // emit updates, not the whole orderbook + h.EmitBookTickerUpdate(globalBookTicker) + default: + logger.Errorf("unsupported book ticker data type %s", r.Type) + return + } +} + +func (h *messageHandler) handlePrivateOrders(response websocketResponse) { + if response.Type == subscribedRespType { + h.handleSubscribedMessage(response) + return + } + + r, err := response.toOrderUpdateResponse() + if err != nil { + logger.WithError(err).Errorf("failed to convert the order update response") + return + } + + globalOrder, err := toGlobalOrderNew(r.Data) + if err != nil { + logger.WithError(err).Errorf("failed to convert order update to global order") + return + } + h.EmitOrderUpdate(globalOrder) +} + +func (h *messageHandler) handleTrades(response websocketResponse) { + if response.Type == subscribedRespType { + h.handleSubscribedMessage(response) + return + } + + r, err := response.toTradeUpdateResponse() + if err != nil { + logger.WithError(err).Errorf("failed to convert the trade update response") + return + } + + t, err := toGlobalTrade(r.Data) + if err != nil { + logger.WithError(err).Errorf("failed to convert trade update to global trade ") + return + } + h.EmitTradeUpdate(t) +} diff --git a/pkg/exchange/ftx/stream_message_handler_test.go b/pkg/exchange/ftx/stream_message_handler_test.go new file mode 100644 index 0000000000..1f640211fd --- /dev/null +++ b/pkg/exchange/ftx/stream_message_handler_test.go @@ -0,0 +1,119 @@ +package ftx + +import ( + "database/sql" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func Test_messageHandler_handleMessage(t *testing.T) { + t.Run("handle order update", func(t *testing.T) { + input := []byte(` +{ + "channel": "orders", + "type": "update", + "data": { + "id": 36379, + "clientId": null, + "market": "OXY-PERP", + "type": "limit", + "side": "sell", + "price": 2.7185, + "size": 1.0, + "status": "closed", + "filledSize": 1.0, + "remainingSize": 0.0, + "reduceOnly": false, + "liquidation": false, + "avgFillPrice": 2.7185, + "postOnly": false, + "ioc": false, + "createdAt": 
"2021-03-28T06:12:50.991447+00:00" + } +} +`) + + h := &messageHandler{StandardStream: &types.StandardStream{}} + i := 0 + h.OnOrderUpdate(func(order types.Order) { + i++ + assert.Equal(t, types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: "", + Symbol: "OXY-PERP", + Side: types.SideTypeSell, + Type: types.OrderTypeLimit, + Quantity: fixedpoint.One, + Price: fixedpoint.NewFromFloat(2.7185), + TimeInForce: "GTC", + }, + Exchange: types.ExchangeFTX, + OrderID: 36379, + Status: types.OrderStatusFilled, + ExecutedQuantity: fixedpoint.One, + CreationTime: types.Time(mustParseDatetime("2021-03-28T06:12:50.991447+00:00")), + UpdateTime: types.Time(mustParseDatetime("2021-03-28T06:12:50.991447+00:00")), + }, order) + }) + h.handleMessage(input) + assert.Equal(t, 1, i) + }) + + t.Run("handle trade update", func(t *testing.T) { + input := []byte(` +{ + "channel": "fills", + "type": "update", + "data": { + "id": 23427, + "market": "OXY-PERP", + "future": "OXY-PERP", + "baseCurrency": null, + "quoteCurrency": null, + "type": "order", + "side": "buy", + "price": 2.723, + "size": 1.0, + "orderId": 323789, + "time": "2021-03-28T06:12:34.702926+00:00", + "tradeId": 6276431, + "feeRate": 0.00056525, + "fee": 0.00153917575, + "feeCurrency": "USD", + "liquidity": "taker" + } +} +`) + h := &messageHandler{StandardStream: &types.StandardStream{}} + i := 0 + h.OnTradeUpdate(func(trade types.Trade) { + i++ + assert.Equal(t, types.Trade{ + ID: uint64(6276431), + OrderID: uint64(323789), + Exchange: types.ExchangeFTX, + Price: fixedpoint.NewFromFloat(2.723), + Quantity: fixedpoint.One, + QuoteQuantity: fixedpoint.NewFromFloat(2.723 * 1.0), + Symbol: "OXY-PERP", + Side: types.SideTypeBuy, + IsBuyer: true, + IsMaker: false, + Time: types.Time(mustParseDatetime("2021-03-28T06:12:34.702926+00:00")), + Fee: fixedpoint.NewFromFloat(0.00153917575), + FeeCurrency: "USD", + IsMargin: false, + IsIsolated: false, + IsFutures: true, + StrategyID: sql.NullString{}, + PnL: sql.NullFloat64{}, + }, trade) + }) + h.handleMessage(input) + assert.Equal(t, 1, i) + }) +} diff --git a/pkg/exchange/ftx/symbols.go b/pkg/exchange/ftx/symbols.go new file mode 100644 index 0000000000..33cb022965 --- /dev/null +++ b/pkg/exchange/ftx/symbols.go @@ -0,0 +1,819 @@ +// Code generated by go generate; DO NOT EDIT. 
+package ftx + +var symbolMap = map[string]string{ + "1INCH-0325": "1INCH-0325", + "1INCH-PERP": "1INCH-PERP", + "1INCHUSD": "1INCH/USD", + "AAPL-0325": "AAPL-0325", + "AAPLUSD": "AAPL/USD", + "AAVE-0325": "AAVE-0325", + "AAVE-PERP": "AAVE-PERP", + "AAVEUSD": "AAVE/USD", + "AAVEUSDT": "AAVE/USDT", + "ABNB-0325": "ABNB-0325", + "ABNBUSD": "ABNB/USD", + "ACB-0325": "ACB-0325", + "ACBUSD": "ACB/USD", + "ADA-0325": "ADA-0325", + "ADA-PERP": "ADA-PERP", + "ADABEARUSD": "ADABEAR/USD", + "ADABULLUSD": "ADABULL/USD", + "ADAHALFUSD": "ADAHALF/USD", + "ADAHEDGEUSD": "ADAHEDGE/USD", + "AGLD-PERP": "AGLD-PERP", + "AGLDUSD": "AGLD/USD", + "AKROUSD": "AKRO/USD", + "AKROUSDT": "AKRO/USDT", + "ALCX-PERP": "ALCX-PERP", + "ALCXUSD": "ALCX/USD", + "ALEPHUSD": "ALEPH/USD", + "ALGO-0325": "ALGO-0325", + "ALGO-PERP": "ALGO-PERP", + "ALGOBEARUSD": "ALGOBEAR/USD", + "ALGOBULLUSD": "ALGOBULL/USD", + "ALGOHALFUSD": "ALGOHALF/USD", + "ALGOHEDGEUSD": "ALGOHEDGE/USD", + "ALICE-PERP": "ALICE-PERP", + "ALICEUSD": "ALICE/USD", + "ALPHA-PERP": "ALPHA-PERP", + "ALPHAUSD": "ALPHA/USD", + "ALT-0325": "ALT-0325", + "ALT-PERP": "ALT-PERP", + "ALTBEARUSD": "ALTBEAR/USD", + "ALTBULLUSD": "ALTBULL/USD", + "ALTHALFUSD": "ALTHALF/USD", + "ALTHEDGEUSD": "ALTHEDGE/USD", + "AMC-0325": "AMC-0325", + "AMCUSD": "AMC/USD", + "AMD-0325": "AMD-0325", + "AMDUSD": "AMD/USD", + "AMPL-PERP": "AMPL-PERP", + "AMPLUSD": "AMPL/USD", + "AMPLUSDT": "AMPL/USDT", + "AMZN-0325": "AMZN-0325", + "AMZNUSD": "AMZN/USD", + "APHAUSD": "APHA/USD", + "AR-PERP": "AR-PERP", + "ARKK-0325": "ARKK-0325", + "ARKKUSD": "ARKK/USD", + "ASD-PERP": "ASD-PERP", + "ASDBEARUSD": "ASDBEAR/USD", + "ASDBEARUSDT": "ASDBEAR/USDT", + "ASDBULLUSD": "ASDBULL/USD", + "ASDBULLUSDT": "ASDBULL/USDT", + "ASDHALFUSD": "ASDHALF/USD", + "ASDHEDGEUSD": "ASDHEDGE/USD", + "ASDUSD": "ASD/USD", + "ATLAS-PERP": "ATLAS-PERP", + "ATLASUSD": "ATLAS/USD", + "ATOM-0325": "ATOM-0325", + "ATOM-PERP": "ATOM-PERP", + "ATOMBEARUSD": "ATOMBEAR/USD", + "ATOMBULLUSD": "ATOMBULL/USD", + "ATOMHALFUSD": "ATOMHALF/USD", + "ATOMHEDGEUSD": "ATOMHEDGE/USD", + "ATOMUSD": "ATOM/USD", + "ATOMUSDT": "ATOM/USDT", + "AUDIO-PERP": "AUDIO-PERP", + "AUDIOUSD": "AUDIO/USD", + "AUDIOUSDT": "AUDIO/USDT", + "AURYUSD": "AURY/USD", + "AVAX-0325": "AVAX-0325", + "AVAX-PERP": "AVAX-PERP", + "AVAXBTC": "AVAX/BTC", + "AVAXUSD": "AVAX/USD", + "AVAXUSDT": "AVAX/USDT", + "AXS-PERP": "AXS-PERP", + "AXSUSD": "AXS/USD", + "BABA-0325": "BABA-0325", + "BABAUSD": "BABA/USD", + "BADGER-PERP": "BADGER-PERP", + "BADGERUSD": "BADGER/USD", + "BAL-0325": "BAL-0325", + "BAL-PERP": "BAL-PERP", + "BALBEARUSD": "BALBEAR/USD", + "BALBEARUSDT": "BALBEAR/USDT", + "BALBULLUSD": "BALBULL/USD", + "BALBULLUSDT": "BALBULL/USDT", + "BALHALFUSD": "BALHALF/USD", + "BALHEDGEUSD": "BALHEDGE/USD", + "BALUSD": "BAL/USD", + "BALUSDT": "BAL/USDT", + "BAND-PERP": "BAND-PERP", + "BANDUSD": "BAND/USD", + "BAO-PERP": "BAO-PERP", + "BAOUSD": "BAO/USD", + "BARUSD": "BAR/USD", + "BAT-PERP": "BAT-PERP", + "BATUSD": "BAT/USD", + "BB-0325": "BB-0325", + "BBUSD": "BB/USD", + "BCH-0325": "BCH-0325", + "BCH-PERP": "BCH-PERP", + "BCHBEARUSD": "BCHBEAR/USD", + "BCHBEARUSDT": "BCHBEAR/USDT", + "BCHBTC": "BCH/BTC", + "BCHBULLUSD": "BCHBULL/USD", + "BCHBULLUSDT": "BCHBULL/USDT", + "BCHHALFUSD": "BCHHALF/USD", + "BCHHEDGEUSD": "BCHHEDGE/USD", + "BCHUSD": "BCH/USD", + "BCHUSDT": "BCH/USDT", + "BEARSHITUSD": "BEARSHIT/USD", + "BEARUSD": "BEAR/USD", + "BEARUSDT": "BEAR/USDT", + "BICOUSD": "BICO/USD", + "BILI-0325": "BILI-0325", + "BILIUSD": "BILI/USD", + "BIT-PERP": "BIT-PERP", + 
"BITO-0325": "BITO-0325", + "BITOUSD": "BITO/USD", + "BITUSD": "BIT/USD", + "BITW-0325": "BITW-0325", + "BITWUSD": "BITW/USD", + "BLTUSD": "BLT/USD", + "BNB-0325": "BNB-0325", + "BNB-PERP": "BNB-PERP", + "BNBBEARUSD": "BNBBEAR/USD", + "BNBBEARUSDT": "BNBBEAR/USDT", + "BNBBTC": "BNB/BTC", + "BNBBULLUSD": "BNBBULL/USD", + "BNBBULLUSDT": "BNBBULL/USDT", + "BNBHALFUSD": "BNBHALF/USD", + "BNBHEDGEUSD": "BNBHEDGE/USD", + "BNBUSD": "BNB/USD", + "BNBUSDT": "BNB/USDT", + "BNT-PERP": "BNT-PERP", + "BNTUSD": "BNT/USD", + "BNTX-0325": "BNTX-0325", + "BNTXUSD": "BNTX/USD", + "BOBA-PERP": "BOBA-PERP", + "BOBAUSD": "BOBA/USD", + "BOLSONARO2022": "BOLSONARO2022", + "BRZ-PERP": "BRZ-PERP", + "BRZUSD": "BRZ/USD", + "BRZUSDT": "BRZ/USDT", + "BSV-0325": "BSV-0325", + "BSV-PERP": "BSV-PERP", + "BSVBEARUSD": "BSVBEAR/USD", + "BSVBEARUSDT": "BSVBEAR/USDT", + "BSVBULLUSD": "BSVBULL/USD", + "BSVBULLUSDT": "BSVBULL/USDT", + "BSVHALFUSD": "BSVHALF/USD", + "BSVHEDGEUSD": "BSVHEDGE/USD", + "BTC-0325": "BTC-0325", + "BTC-0624": "BTC-0624", + "BTC-MOVE-0303": "BTC-MOVE-0303", + "BTC-MOVE-0304": "BTC-MOVE-0304", + "BTC-MOVE-2022Q1": "BTC-MOVE-2022Q1", + "BTC-MOVE-2022Q2": "BTC-MOVE-2022Q2", + "BTC-MOVE-2022Q3": "BTC-MOVE-2022Q3", + "BTC-MOVE-WK-0304": "BTC-MOVE-WK-0304", + "BTC-MOVE-WK-0311": "BTC-MOVE-WK-0311", + "BTC-MOVE-WK-0318": "BTC-MOVE-WK-0318", + "BTC-MOVE-WK-0325": "BTC-MOVE-WK-0325", + "BTC-PERP": "BTC-PERP", + "BTCBRZ": "BTC/BRZ", + "BTCEUR": "BTC/EUR", + "BTCTRYB": "BTC/TRYB", + "BTCUSD": "BTC/USD", + "BTCUSDT": "BTC/USDT", + "BTT-PERP": "BTT-PERP", + "BTTUSD": "BTT/USD", + "BULLSHITUSD": "BULLSHIT/USD", + "BULLUSD": "BULL/USD", + "BULLUSDT": "BULL/USDT", + "BVOLBTC": "BVOL/BTC", + "BVOLUSD": "BVOL/USD", + "BVOLUSDT": "BVOL/USDT", + "BYND-0325": "BYND-0325", + "BYNDUSD": "BYND/USD", + "C98-PERP": "C98-PERP", + "C98USD": "C98/USD", + "CADUSD": "CAD/USD", + "CAKE-PERP": "CAKE-PERP", + "CEL-0325": "CEL-0325", + "CEL-PERP": "CEL-PERP", + "CELBTC": "CEL/BTC", + "CELO-PERP": "CELO-PERP", + "CELUSD": "CEL/USD", + "CGC-0325": "CGC-0325", + "CGCUSD": "CGC/USD", + "CHR-PERP": "CHR-PERP", + "CHRUSD": "CHR/USD", + "CHZ-0325": "CHZ-0325", + "CHZ-PERP": "CHZ-PERP", + "CHZUSD": "CHZ/USD", + "CHZUSDT": "CHZ/USDT", + "CITYUSD": "CITY/USD", + "CLV-PERP": "CLV-PERP", + "CLVUSD": "CLV/USD", + "COINUSD": "COIN/USD", + "COMP-0325": "COMP-0325", + "COMP-PERP": "COMP-PERP", + "COMPBEARUSD": "COMPBEAR/USD", + "COMPBEARUSDT": "COMPBEAR/USDT", + "COMPBULLUSD": "COMPBULL/USD", + "COMPBULLUSDT": "COMPBULL/USDT", + "COMPHALFUSD": "COMPHALF/USD", + "COMPHEDGEUSD": "COMPHEDGE/USD", + "COMPUSD": "COMP/USD", + "COMPUSDT": "COMP/USDT", + "CONV-PERP": "CONV-PERP", + "CONVUSD": "CONV/USD", + "COPEUSD": "COPE/USD", + "CQTUSD": "CQT/USD", + "CREAM-PERP": "CREAM-PERP", + "CREAMUSD": "CREAM/USD", + "CREAMUSDT": "CREAM/USDT", + "CRO-PERP": "CRO-PERP", + "CRON-0325": "CRON-0325", + "CRONUSD": "CRON/USD", + "CROUSD": "CRO/USD", + "CRV-PERP": "CRV-PERP", + "CRVUSD": "CRV/USD", + "CUSDT-PERP": "CUSDT-PERP", + "CUSDTBEARUSD": "CUSDTBEAR/USD", + "CUSDTBEARUSDT": "CUSDTBEAR/USDT", + "CUSDTBULLUSD": "CUSDTBULL/USD", + "CUSDTBULLUSDT": "CUSDTBULL/USDT", + "CUSDTHALFUSD": "CUSDTHALF/USD", + "CUSDTHEDGEUSD": "CUSDTHEDGE/USD", + "CUSDTUSD": "CUSDT/USD", + "CUSDTUSDT": "CUSDT/USDT", + "CVC-PERP": "CVC-PERP", + "CVCUSD": "CVC/USD", + "DAIUSD": "DAI/USD", + "DAIUSDT": "DAI/USDT", + "DASH-PERP": "DASH-PERP", + "DAWN-PERP": "DAWN-PERP", + "DAWNUSD": "DAWN/USD", + "DEFI-0325": "DEFI-0325", + "DEFI-PERP": "DEFI-PERP", + "DEFIBEARUSD": "DEFIBEAR/USD", + "DEFIBEARUSDT": 
"DEFIBEAR/USDT", + "DEFIBULLUSD": "DEFIBULL/USD", + "DEFIBULLUSDT": "DEFIBULL/USDT", + "DEFIHALFUSD": "DEFIHALF/USD", + "DEFIHEDGEUSD": "DEFIHEDGE/USD", + "DENT-PERP": "DENT-PERP", + "DENTUSD": "DENT/USD", + "DFLUSD": "DFL/USD", + "DKNG-0325": "DKNG-0325", + "DKNGUSD": "DKNG/USD", + "DMGUSD": "DMG/USD", + "DMGUSDT": "DMG/USDT", + "DODO-PERP": "DODO-PERP", + "DODOUSD": "DODO/USD", + "DOGE-0325": "DOGE-0325", + "DOGE-PERP": "DOGE-PERP", + "DOGEBEAR2021USD": "DOGEBEAR2021/USD", + "DOGEBTC": "DOGE/BTC", + "DOGEBULLUSD": "DOGEBULL/USD", + "DOGEHALFUSD": "DOGEHALF/USD", + "DOGEHEDGEUSD": "DOGEHEDGE/USD", + "DOGEUSD": "DOGE/USD", + "DOGEUSDT": "DOGE/USDT", + "DOT-0325": "DOT-0325", + "DOT-PERP": "DOT-PERP", + "DOTBTC": "DOT/BTC", + "DOTUSD": "DOT/USD", + "DOTUSDT": "DOT/USDT", + "DRGN-0325": "DRGN-0325", + "DRGN-PERP": "DRGN-PERP", + "DRGNBEARUSD": "DRGNBEAR/USD", + "DRGNBULLUSD": "DRGNBULL/USD", + "DRGNHALFUSD": "DRGNHALF/USD", + "DRGNHEDGEUSD": "DRGNHEDGE/USD", + "DYDX-PERP": "DYDX-PERP", + "DYDXUSD": "DYDX/USD", + "EDEN-0325": "EDEN-0325", + "EDEN-PERP": "EDEN-PERP", + "EDENUSD": "EDEN/USD", + "EGLD-PERP": "EGLD-PERP", + "EMBUSD": "EMB/USD", + "ENJ-PERP": "ENJ-PERP", + "ENJUSD": "ENJ/USD", + "ENS-PERP": "ENS-PERP", + "ENSUSD": "ENS/USD", + "EOS-0325": "EOS-0325", + "EOS-PERP": "EOS-PERP", + "EOSBEARUSD": "EOSBEAR/USD", + "EOSBEARUSDT": "EOSBEAR/USDT", + "EOSBULLUSD": "EOSBULL/USD", + "EOSBULLUSDT": "EOSBULL/USDT", + "EOSHALFUSD": "EOSHALF/USD", + "EOSHEDGEUSD": "EOSHEDGE/USD", + "ETC-PERP": "ETC-PERP", + "ETCBEARUSD": "ETCBEAR/USD", + "ETCBULLUSD": "ETCBULL/USD", + "ETCHALFUSD": "ETCHALF/USD", + "ETCHEDGEUSD": "ETCHEDGE/USD", + "ETH-0325": "ETH-0325", + "ETH-0624": "ETH-0624", + "ETH-PERP": "ETH-PERP", + "ETHBEARUSD": "ETHBEAR/USD", + "ETHBEARUSDT": "ETHBEAR/USDT", + "ETHBRZ": "ETH/BRZ", + "ETHBTC": "ETH/BTC", + "ETHBULLUSD": "ETHBULL/USD", + "ETHBULLUSDT": "ETHBULL/USDT", + "ETHE-0325": "ETHE-0325", + "ETHEUR": "ETH/EUR", + "ETHEUSD": "ETHE/USD", + "ETHHALFUSD": "ETHHALF/USD", + "ETHHEDGEUSD": "ETHHEDGE/USD", + "ETHUSD": "ETH/USD", + "ETHUSDT": "ETH/USDT", + "EURTEUR": "EURT/EUR", + "EURTUSD": "EURT/USD", + "EURTUSDT": "EURT/USDT", + "EURUSD": "EUR/USD", + "EXCH-0325": "EXCH-0325", + "EXCH-PERP": "EXCH-PERP", + "EXCHBEARUSD": "EXCHBEAR/USD", + "EXCHBULLUSD": "EXCHBULL/USD", + "EXCHHALFUSD": "EXCHHALF/USD", + "EXCHHEDGEUSD": "EXCHHEDGE/USD", + "FB-0325": "FB-0325", + "FBUSD": "FB/USD", + "FIDA-PERP": "FIDA-PERP", + "FIDAUSD": "FIDA/USD", + "FIDAUSDT": "FIDA/USDT", + "FIL-0325": "FIL-0325", + "FIL-PERP": "FIL-PERP", + "FLM-PERP": "FLM-PERP", + "FLOW-PERP": "FLOW-PERP", + "FRONTUSD": "FRONT/USD", + "FRONTUSDT": "FRONT/USDT", + "FTM-PERP": "FTM-PERP", + "FTMUSD": "FTM/USD", + "FTT-PERP": "FTT-PERP", + "FTTBTC": "FTT/BTC", + "FTTUSD": "FTT/USD", + "FTTUSDT": "FTT/USDT", + "GALA-PERP": "GALA-PERP", + "GALAUSD": "GALA/USD", + "GALUSD": "GAL/USD", + "GARIUSD": "GARI/USD", + "GBPUSD": "GBP/USD", + "GBTC-0325": "GBTC-0325", + "GBTCUSD": "GBTC/USD", + "GDX-0325": "GDX-0325", + "GDXJ-0325": "GDXJ-0325", + "GDXJUSD": "GDXJ/USD", + "GDXUSD": "GDX/USD", + "GENEUSD": "GENE/USD", + "GLD-0325": "GLD-0325", + "GLDUSD": "GLD/USD", + "GLXYUSD": "GLXY/USD", + "GME-0325": "GME-0325", + "GMEUSD": "GME/USD", + "GODSUSD": "GODS/USD", + "GOGUSD": "GOG/USD", + "GOOGL-0325": "GOOGL-0325", + "GOOGLUSD": "GOOGL/USD", + "GRT-0325": "GRT-0325", + "GRT-PERP": "GRT-PERP", + "GRTBEARUSD": "GRTBEAR/USD", + "GRTBULLUSD": "GRTBULL/USD", + "GRTUSD": "GRT/USD", + "GTUSD": "GT/USD", + "HALFSHITUSD": "HALFSHIT/USD", + "HALFUSD": 
"HALF/USD", + "HBAR-PERP": "HBAR-PERP", + "HEDGESHITUSD": "HEDGESHIT/USD", + "HEDGEUSD": "HEDGE/USD", + "HGETUSD": "HGET/USD", + "HGETUSDT": "HGET/USDT", + "HMTUSD": "HMT/USD", + "HNT-PERP": "HNT-PERP", + "HNTUSD": "HNT/USD", + "HNTUSDT": "HNT/USDT", + "HOLY-PERP": "HOLY-PERP", + "HOLYUSD": "HOLY/USD", + "HOODUSD": "HOOD/USD", + "HOT-PERP": "HOT-PERP", + "HT-PERP": "HT-PERP", + "HTBEARUSD": "HTBEAR/USD", + "HTBULLUSD": "HTBULL/USD", + "HTHALFUSD": "HTHALF/USD", + "HTHEDGEUSD": "HTHEDGE/USD", + "HTUSD": "HT/USD", + "HUM-PERP": "HUM-PERP", + "HUMUSD": "HUM/USD", + "HXROUSD": "HXRO/USD", + "HXROUSDT": "HXRO/USDT", + "IBVOLBTC": "IBVOL/BTC", + "IBVOLUSD": "IBVOL/USD", + "IBVOLUSDT": "IBVOL/USDT", + "ICP-PERP": "ICP-PERP", + "ICX-PERP": "ICX-PERP", + "IMX-PERP": "IMX-PERP", + "IMXUSD": "IMX/USD", + "INTERUSD": "INTER/USD", + "IOTA-PERP": "IOTA-PERP", + "JETUSD": "JET/USD", + "JOEUSD": "JOE/USD", + "JSTUSD": "JST/USD", + "KAVA-PERP": "KAVA-PERP", + "KBTT-PERP": "KBTT-PERP", + "KBTTUSD": "KBTT/USD", + "KIN-PERP": "KIN-PERP", + "KINUSD": "KIN/USD", + "KNC-PERP": "KNC-PERP", + "KNCBEARUSD": "KNCBEAR/USD", + "KNCBEARUSDT": "KNCBEAR/USDT", + "KNCBULLUSD": "KNCBULL/USD", + "KNCBULLUSDT": "KNCBULL/USDT", + "KNCHALFUSD": "KNCHALF/USD", + "KNCHEDGEUSD": "KNCHEDGE/USD", + "KNCUSD": "KNC/USD", + "KNCUSDT": "KNC/USDT", + "KSHIB-PERP": "KSHIB-PERP", + "KSHIBUSD": "KSHIB/USD", + "KSM-PERP": "KSM-PERP", + "KSOS-PERP": "KSOS-PERP", + "KSOSUSD": "KSOS/USD", + "LEO-PERP": "LEO-PERP", + "LEOBEARUSD": "LEOBEAR/USD", + "LEOBULLUSD": "LEOBULL/USD", + "LEOHALFUSD": "LEOHALF/USD", + "LEOHEDGEUSD": "LEOHEDGE/USD", + "LEOUSD": "LEO/USD", + "LINA-PERP": "LINA-PERP", + "LINAUSD": "LINA/USD", + "LINK-0325": "LINK-0325", + "LINK-PERP": "LINK-PERP", + "LINKBEARUSD": "LINKBEAR/USD", + "LINKBEARUSDT": "LINKBEAR/USDT", + "LINKBTC": "LINK/BTC", + "LINKBULLUSD": "LINKBULL/USD", + "LINKBULLUSDT": "LINKBULL/USDT", + "LINKHALFUSD": "LINKHALF/USD", + "LINKHEDGEUSD": "LINKHEDGE/USD", + "LINKUSD": "LINK/USD", + "LINKUSDT": "LINK/USDT", + "LOOKS-PERP": "LOOKS-PERP", + "LOOKSUSD": "LOOKS/USD", + "LRC-PERP": "LRC-PERP", + "LRCUSD": "LRC/USD", + "LTC-0325": "LTC-0325", + "LTC-PERP": "LTC-PERP", + "LTCBEARUSD": "LTCBEAR/USD", + "LTCBEARUSDT": "LTCBEAR/USDT", + "LTCBTC": "LTC/BTC", + "LTCBULLUSD": "LTCBULL/USD", + "LTCBULLUSDT": "LTCBULL/USDT", + "LTCHALFUSD": "LTCHALF/USD", + "LTCHEDGEUSD": "LTCHEDGE/USD", + "LTCUSD": "LTC/USD", + "LTCUSDT": "LTC/USDT", + "LUAUSD": "LUA/USD", + "LUAUSDT": "LUA/USDT", + "LUNA-PERP": "LUNA-PERP", + "LUNAUSD": "LUNA/USD", + "LUNAUSDT": "LUNA/USDT", + "MANA-PERP": "MANA-PERP", + "MANAUSD": "MANA/USD", + "MAPS-PERP": "MAPS-PERP", + "MAPSUSD": "MAPS/USD", + "MAPSUSDT": "MAPS/USDT", + "MATHUSD": "MATH/USD", + "MATHUSDT": "MATH/USDT", + "MATIC-PERP": "MATIC-PERP", + "MATICBEAR2021USD": "MATICBEAR2021/USD", + "MATICBTC": "MATIC/BTC", + "MATICBULLUSD": "MATICBULL/USD", + "MATICHALFUSD": "MATICHALF/USD", + "MATICHEDGEUSD": "MATICHEDGE/USD", + "MATICUSD": "MATIC/USD", + "MBSUSD": "MBS/USD", + "MCB-PERP": "MCB-PERP", + "MCBUSD": "MCB/USD", + "MEDIA-PERP": "MEDIA-PERP", + "MEDIAUSD": "MEDIA/USD", + "MER-PERP": "MER-PERP", + "MERUSD": "MER/USD", + "MID-0325": "MID-0325", + "MID-PERP": "MID-PERP", + "MIDBEARUSD": "MIDBEAR/USD", + "MIDBULLUSD": "MIDBULL/USD", + "MIDHALFUSD": "MIDHALF/USD", + "MIDHEDGEUSD": "MIDHEDGE/USD", + "MINA-PERP": "MINA-PERP", + "MKR-PERP": "MKR-PERP", + "MKRBEARUSD": "MKRBEAR/USD", + "MKRBULLUSD": "MKRBULL/USD", + "MKRUSD": "MKR/USD", + "MKRUSDT": "MKR/USDT", + "MNGO-PERP": "MNGO-PERP", + "MNGOUSD": 
"MNGO/USD", + "MOBUSD": "MOB/USD", + "MOBUSDT": "MOB/USDT", + "MRNA-0325": "MRNA-0325", + "MRNAUSD": "MRNA/USD", + "MSOLUSD": "MSOL/USD", + "MSTR-0325": "MSTR-0325", + "MSTRUSD": "MSTR/USD", + "MTA-PERP": "MTA-PERP", + "MTAUSD": "MTA/USD", + "MTAUSDT": "MTA/USDT", + "MTL-PERP": "MTL-PERP", + "MTLUSD": "MTL/USD", + "MVDA10-PERP": "MVDA10-PERP", + "MVDA25-PERP": "MVDA25-PERP", + "NEAR-PERP": "NEAR-PERP", + "NEO-PERP": "NEO-PERP", + "NEXOUSD": "NEXO/USD", + "NFLX-0325": "NFLX-0325", + "NFLXUSD": "NFLX/USD", + "NIO-0325": "NIO-0325", + "NIOUSD": "NIO/USD", + "NOK-0325": "NOK-0325", + "NOKUSD": "NOK/USD", + "NVDA-0325": "NVDA-0325", + "NVDAUSD": "NVDA/USD", + "OKB-0325": "OKB-0325", + "OKB-PERP": "OKB-PERP", + "OKBBEARUSD": "OKBBEAR/USD", + "OKBBULLUSD": "OKBBULL/USD", + "OKBHALFUSD": "OKBHALF/USD", + "OKBHEDGEUSD": "OKBHEDGE/USD", + "OKBUSD": "OKB/USD", + "OMG-0325": "OMG-0325", + "OMG-PERP": "OMG-PERP", + "OMGUSD": "OMG/USD", + "ONE-PERP": "ONE-PERP", + "ONT-PERP": "ONT-PERP", + "ORBS-PERP": "ORBS-PERP", + "ORBSUSD": "ORBS/USD", + "OXY-PERP": "OXY-PERP", + "OXYUSD": "OXY/USD", + "OXYUSDT": "OXY/USDT", + "PAXG-PERP": "PAXG-PERP", + "PAXGBEARUSD": "PAXGBEAR/USD", + "PAXGBULLUSD": "PAXGBULL/USD", + "PAXGHALFUSD": "PAXGHALF/USD", + "PAXGHEDGEUSD": "PAXGHEDGE/USD", + "PAXGUSD": "PAXG/USD", + "PAXGUSDT": "PAXG/USDT", + "PENN-0325": "PENN-0325", + "PENNUSD": "PENN/USD", + "PEOPLE-PERP": "PEOPLE-PERP", + "PEOPLEUSD": "PEOPLE/USD", + "PERP-PERP": "PERP-PERP", + "PERPUSD": "PERP/USD", + "PFE-0325": "PFE-0325", + "PFEUSD": "PFE/USD", + "POLIS-PERP": "POLIS-PERP", + "POLISUSD": "POLIS/USD", + "PORTUSD": "PORT/USD", + "PRISMUSD": "PRISM/USD", + "PRIV-0325": "PRIV-0325", + "PRIV-PERP": "PRIV-PERP", + "PRIVBEARUSD": "PRIVBEAR/USD", + "PRIVBULLUSD": "PRIVBULL/USD", + "PRIVHALFUSD": "PRIVHALF/USD", + "PRIVHEDGEUSD": "PRIVHEDGE/USD", + "PROM-PERP": "PROM-PERP", + "PROMUSD": "PROM/USD", + "PSGUSD": "PSG/USD", + "PSYUSD": "PSY/USD", + "PTUUSD": "PTU/USD", + "PUNDIX-PERP": "PUNDIX-PERP", + "PUNDIXUSD": "PUNDIX/USD", + "PYPL-0325": "PYPL-0325", + "PYPLUSD": "PYPL/USD", + "QIUSD": "QI/USD", + "QTUM-PERP": "QTUM-PERP", + "RAMP-PERP": "RAMP-PERP", + "RAMPUSD": "RAMP/USD", + "RAY-PERP": "RAY-PERP", + "RAYUSD": "RAY/USD", + "REALUSD": "REAL/USD", + "REEF-0325": "REEF-0325", + "REEF-PERP": "REEF-PERP", + "REEFUSD": "REEF/USD", + "REN-PERP": "REN-PERP", + "RENUSD": "REN/USD", + "RNDR-PERP": "RNDR-PERP", + "RNDRUSD": "RNDR/USD", + "RON-PERP": "RON-PERP", + "ROOK-PERP": "ROOK-PERP", + "ROOKUSD": "ROOK/USD", + "ROOKUSDT": "ROOK/USDT", + "ROSE-PERP": "ROSE-PERP", + "RSR-PERP": "RSR-PERP", + "RSRUSD": "RSR/USD", + "RUNE-PERP": "RUNE-PERP", + "RUNEUSD": "RUNE/USD", + "RUNEUSDT": "RUNE/USDT", + "SAND-PERP": "SAND-PERP", + "SANDUSD": "SAND/USD", + "SC-PERP": "SC-PERP", + "SCRT-PERP": "SCRT-PERP", + "SECO-PERP": "SECO-PERP", + "SECOUSD": "SECO/USD", + "SHIB-PERP": "SHIB-PERP", + "SHIBUSD": "SHIB/USD", + "SHIT-0325": "SHIT-0325", + "SHIT-PERP": "SHIT-PERP", + "SKL-PERP": "SKL-PERP", + "SKLUSD": "SKL/USD", + "SLNDUSD": "SLND/USD", + "SLP-PERP": "SLP-PERP", + "SLPUSD": "SLP/USD", + "SLRSUSD": "SLRS/USD", + "SLV-0325": "SLV-0325", + "SLVUSD": "SLV/USD", + "SNX-PERP": "SNX-PERP", + "SNXUSD": "SNX/USD", + "SNYUSD": "SNY/USD", + "SOL-0325": "SOL-0325", + "SOL-PERP": "SOL-PERP", + "SOLBTC": "SOL/BTC", + "SOLUSD": "SOL/USD", + "SOLUSDT": "SOL/USDT", + "SOS-PERP": "SOS-PERP", + "SOSUSD": "SOS/USD", + "SPELL-PERP": "SPELL-PERP", + "SPELLUSD": "SPELL/USD", + "SPY-0325": "SPY-0325", + "SPYUSD": "SPY/USD", + "SQ-0325": "SQ-0325", + "SQUSD": 
"SQ/USD", + "SRM-PERP": "SRM-PERP", + "SRMUSD": "SRM/USD", + "SRMUSDT": "SRM/USDT", + "SRN-PERP": "SRN-PERP", + "STARSUSD": "STARS/USD", + "STEP-PERP": "STEP-PERP", + "STEPUSD": "STEP/USD", + "STETHUSD": "STETH/USD", + "STMX-PERP": "STMX-PERP", + "STMXUSD": "STMX/USD", + "STORJ-PERP": "STORJ-PERP", + "STORJUSD": "STORJ/USD", + "STSOLUSD": "STSOL/USD", + "STX-PERP": "STX-PERP", + "SUNUSD": "SUN/USD", + "SUSHI-0325": "SUSHI-0325", + "SUSHI-PERP": "SUSHI-PERP", + "SUSHIBEARUSD": "SUSHIBEAR/USD", + "SUSHIBTC": "SUSHI/BTC", + "SUSHIBULLUSD": "SUSHIBULL/USD", + "SUSHIUSD": "SUSHI/USD", + "SUSHIUSDT": "SUSHI/USDT", + "SXP-0325": "SXP-0325", + "SXP-PERP": "SXP-PERP", + "SXPBEARUSD": "SXPBEAR/USD", + "SXPBTC": "SXP/BTC", + "SXPBULLUSD": "SXPBULL/USD", + "SXPHALFUSD": "SXPHALF/USD", + "SXPHALFUSDT": "SXPHALF/USDT", + "SXPHEDGEUSD": "SXPHEDGE/USD", + "SXPUSD": "SXP/USD", + "SXPUSDT": "SXP/USDT", + "THETA-0325": "THETA-0325", + "THETA-PERP": "THETA-PERP", + "THETABEARUSD": "THETABEAR/USD", + "THETABULLUSD": "THETABULL/USD", + "THETAHALFUSD": "THETAHALF/USD", + "THETAHEDGEUSD": "THETAHEDGE/USD", + "TLM-PERP": "TLM-PERP", + "TLMUSD": "TLM/USD", + "TLRY-0325": "TLRY-0325", + "TLRYUSD": "TLRY/USD", + "TOMO-PERP": "TOMO-PERP", + "TOMOBEAR2021USD": "TOMOBEAR2021/USD", + "TOMOBULLUSD": "TOMOBULL/USD", + "TOMOHALFUSD": "TOMOHALF/USD", + "TOMOHEDGEUSD": "TOMOHEDGE/USD", + "TOMOUSD": "TOMO/USD", + "TOMOUSDT": "TOMO/USDT", + "TONCOIN-PERP": "TONCOIN-PERP", + "TONCOINUSD": "TONCOIN/USD", + "TRU-PERP": "TRU-PERP", + "TRUMP2024": "TRUMP2024", + "TRUUSD": "TRU/USD", + "TRUUSDT": "TRU/USDT", + "TRX-0325": "TRX-0325", + "TRX-PERP": "TRX-PERP", + "TRXBEARUSD": "TRXBEAR/USD", + "TRXBTC": "TRX/BTC", + "TRXBULLUSD": "TRXBULL/USD", + "TRXHALFUSD": "TRXHALF/USD", + "TRXHEDGEUSD": "TRXHEDGE/USD", + "TRXUSD": "TRX/USD", + "TRXUSDT": "TRX/USDT", + "TRYB-PERP": "TRYB-PERP", + "TRYBBEARUSD": "TRYBBEAR/USD", + "TRYBBULLUSD": "TRYBBULL/USD", + "TRYBHALFUSD": "TRYBHALF/USD", + "TRYBHEDGEUSD": "TRYBHEDGE/USD", + "TRYBUSD": "TRYB/USD", + "TSLA-0325": "TSLA-0325", + "TSLABTC": "TSLA/BTC", + "TSLADOGE": "TSLA/DOGE", + "TSLAUSD": "TSLA/USD", + "TSM-0325": "TSM-0325", + "TSMUSD": "TSM/USD", + "TULIP-PERP": "TULIP-PERP", + "TULIPUSD": "TULIP/USD", + "TWTR-0325": "TWTR-0325", + "TWTRUSD": "TWTR/USD", + "UBER-0325": "UBER-0325", + "UBERUSD": "UBER/USD", + "UBXTUSD": "UBXT/USD", + "UBXTUSDT": "UBXT/USDT", + "UMEEUSD": "UMEE/USD", + "UNI-0325": "UNI-0325", + "UNI-PERP": "UNI-PERP", + "UNIBTC": "UNI/BTC", + "UNISWAP-0325": "UNISWAP-0325", + "UNISWAP-PERP": "UNISWAP-PERP", + "UNISWAPBEARUSD": "UNISWAPBEAR/USD", + "UNISWAPBULLUSD": "UNISWAPBULL/USD", + "UNIUSD": "UNI/USD", + "UNIUSDT": "UNI/USDT", + "USDT-0325": "USDT-0325", + "USDT-PERP": "USDT-PERP", + "USDTBEARUSD": "USDTBEAR/USD", + "USDTBULLUSD": "USDTBULL/USD", + "USDTHALFUSD": "USDTHALF/USD", + "USDTHEDGEUSD": "USDTHEDGE/USD", + "USDTUSD": "USDT/USD", + "USO-0325": "USO-0325", + "USOUSD": "USO/USD", + "UST-PERP": "UST-PERP", + "USTUSD": "UST/USD", + "USTUSDT": "UST/USDT", + "VET-PERP": "VET-PERP", + "VETBEARUSD": "VETBEAR/USD", + "VETBEARUSDT": "VETBEAR/USDT", + "VETBULLUSD": "VETBULL/USD", + "VETBULLUSDT": "VETBULL/USDT", + "VETHEDGEUSD": "VETHEDGE/USD", + "VGXUSD": "VGX/USD", + "WAVES-0325": "WAVES-0325", + "WAVES-PERP": "WAVES-PERP", + "WAVESUSD": "WAVES/USD", + "WBTCBTC": "WBTC/BTC", + "WBTCUSD": "WBTC/USD", + "WNDRUSD": "WNDR/USD", + "WRXUSD": "WRX/USD", + "WRXUSDT": "WRX/USDT", + "WSB-0325": "WSB-0325", + "XAUT-0325": "XAUT-0325", + "XAUT-PERP": "XAUT-PERP", + "XAUTBEARUSD": 
"XAUTBEAR/USD", + "XAUTBULLUSD": "XAUTBULL/USD", + "XAUTHALFUSD": "XAUTHALF/USD", + "XAUTHEDGEUSD": "XAUTHEDGE/USD", + "XAUTUSD": "XAUT/USD", + "XAUTUSDT": "XAUT/USDT", + "XEM-PERP": "XEM-PERP", + "XLM-PERP": "XLM-PERP", + "XLMBEARUSD": "XLMBEAR/USD", + "XLMBULLUSD": "XLMBULL/USD", + "XMR-PERP": "XMR-PERP", + "XRP-0325": "XRP-0325", + "XRP-PERP": "XRP-PERP", + "XRPBEARUSD": "XRPBEAR/USD", + "XRPBEARUSDT": "XRPBEAR/USDT", + "XRPBTC": "XRP/BTC", + "XRPBULLUSD": "XRPBULL/USD", + "XRPBULLUSDT": "XRPBULL/USDT", + "XRPHALFUSD": "XRPHALF/USD", + "XRPHEDGEUSD": "XRPHEDGE/USD", + "XRPUSD": "XRP/USD", + "XRPUSDT": "XRP/USDT", + "XTZ-0325": "XTZ-0325", + "XTZ-PERP": "XTZ-PERP", + "XTZBEARUSD": "XTZBEAR/USD", + "XTZBEARUSDT": "XTZBEAR/USDT", + "XTZBULLUSD": "XTZBULL/USD", + "XTZBULLUSDT": "XTZBULL/USDT", + "XTZHALFUSD": "XTZHALF/USD", + "XTZHEDGEUSD": "XTZHEDGE/USD", + "YFI-0325": "YFI-0325", + "YFI-PERP": "YFI-PERP", + "YFIBTC": "YFI/BTC", + "YFII-PERP": "YFII-PERP", + "YFIIUSD": "YFII/USD", + "YFIUSD": "YFI/USD", + "YFIUSDT": "YFI/USDT", + "YGGUSD": "YGG/USD", + "ZEC-PERP": "ZEC-PERP", + "ZECBEARUSD": "ZECBEAR/USD", + "ZECBULLUSD": "ZECBULL/USD", + "ZIL-PERP": "ZIL-PERP", + "ZM-0325": "ZM-0325", + "ZMUSD": "ZM/USD", + "ZRX-PERP": "ZRX-PERP", + "ZRXUSD": "ZRX/USD", + "GMTUSD": "GMT/USD", + "GMT-PERP": "GMT-PERP", +} diff --git a/pkg/exchange/ftx/ticker_test.go b/pkg/exchange/ftx/ticker_test.go new file mode 100644 index 0000000000..0bf0197538 --- /dev/null +++ b/pkg/exchange/ftx/ticker_test.go @@ -0,0 +1,54 @@ +package ftx + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExchange_QueryTickers_AllSymbols(t *testing.T) { + key := os.Getenv("FTX_API_KEY") + secret := os.Getenv("FTX_API_SECRET") + subAccount := os.Getenv("FTX_SUBACCOUNT") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + } + + e := NewExchange(key, secret, subAccount) + got, err := e.QueryTickers(context.Background()) + if assert.NoError(t, err) { + assert.True(t, len(got) > 1, "binance: attempting to get all symbol tickers, but get 1 or less") + } +} + +func TestExchange_QueryTickers_SomeSymbols(t *testing.T) { + key := os.Getenv("FTX_API_KEY") + secret := os.Getenv("FTX_API_SECRET") + subAccount := os.Getenv("FTX_SUBACCOUNT") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + } + + e := NewExchange(key, secret, subAccount) + got, err := e.QueryTickers(context.Background(), "BTCUSDT", "ETHUSDT") + if assert.NoError(t, err) { + assert.Len(t, got, 2, "binance: attempting to get two symbols, but number of tickers do not match") + } +} + +func TestExchange_QueryTickers_SingleSymbol(t *testing.T) { + key := os.Getenv("FTX_API_KEY") + secret := os.Getenv("FTX_API_SECRET") + subAccount := os.Getenv("FTX_SUBACCOUNT") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + } + + e := NewExchange(key, secret, subAccount) + got, err := e.QueryTickers(context.Background(), "BTCUSDT") + if assert.NoError(t, err) { + assert.Len(t, got, 1, "binance: attempting to get one symbol, but number of tickers do not match") + } +} diff --git a/pkg/exchange/ftx/websocket_messages.go b/pkg/exchange/ftx/websocket_messages.go new file mode 100644 index 0000000000..2265e1de9a --- /dev/null +++ b/pkg/exchange/ftx/websocket_messages.go @@ -0,0 +1,468 @@ +package ftx + +import ( + "encoding/json" + "fmt" + "hash/crc32" + "math" + "strconv" + "strings" + "time" + + "github.com/c9s/bbgo/pkg/exchange/ftx/ftxapi" + 
"github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type operation string + +const ping operation = "ping" +const login operation = "login" +const subscribe operation = "subscribe" +const unsubscribe operation = "unsubscribe" + +type channel string + +const orderBookChannel channel = "orderbook" +const marketTradeChannel channel = "trades" +const bookTickerChannel channel = "ticker" +const privateOrdersChannel channel = "orders" +const privateTradesChannel channel = "fills" + +var errUnsupportedConversion = fmt.Errorf("unsupported conversion") + +/* +Private: + order update: `{'op': 'subscribe', 'channel': 'orders'}` + login: `{"args": { "key": "", "sign": "", "time": }, "op": "login" }` + +*/ +type websocketRequest struct { + Operation operation `json:"op"` + + // {'op': 'subscribe', 'channel': 'trades', 'market': 'BTC-PERP'} + Channel channel `json:"channel,omitempty"` + Market string `json:"market,omitempty"` + + Login loginArgs `json:"args,omitempty"` +} + +/* +{ + "args": { + "key": "", + "sign": "", + "time": + }, + "op": "login" +} +*/ +type loginArgs struct { + Key string `json:"key"` + Signature string `json:"sign"` + Time int64 `json:"time"` + SubAccount string `json:"subaccount,omitempty"` +} + +func newLoginRequest(key, secret string, t time.Time, subaccount string) websocketRequest { + millis := t.UnixNano() / int64(time.Millisecond) + return websocketRequest{ + Operation: login, + Login: loginArgs{ + Key: key, + Signature: sign(secret, loginBody(millis)), + Time: millis, + SubAccount: subaccount, + }, + } +} + +func loginBody(millis int64) string { + return fmt.Sprintf("%dwebsocket_login", millis) +} + +type respType string + +const pongRespType respType = "pong" +const errRespType respType = "error" +const subscribedRespType respType = "subscribed" +const unsubscribedRespType respType = "unsubscribed" +const infoRespType respType = "info" +const partialRespType respType = "partial" +const updateRespType respType = "update" + +type websocketResponse struct { + mandatoryFields + + optionalFields +} + +type mandatoryFields struct { + Channel channel `json:"channel"` + + Type respType `json:"type"` +} + +type optionalFields struct { + Market string `json:"market"` + + // Example: {"type": "error", "code": 404, "msg": "No such market: BTCUSDT"} + Code int64 `json:"code"` + + Message string `json:"msg"` + + Data json.RawMessage `json:"data"` +} + +type orderUpdateResponse struct { + mandatoryFields + + Data ftxapi.Order `json:"data"` +} + +type trade struct { + Price fixedpoint.Value `json:"price"` + Size fixedpoint.Value `json:"size"` + Side string `json:"side"` + Liquidation bool `json:"liquidation"` + Time time.Time `json:"time"` +} +type tradeResponse struct { + mandatoryFields + Data []trade `json:"data"` +} + +func (r websocketResponse) toMarketTradeResponse() (t []types.Trade, err error) { + if r.Channel != marketTradeChannel { + return t, fmt.Errorf("type %s, channel %s: channel incorrect", r.Type, r.Channel) + } + var tds []trade + if err = json.Unmarshal(r.Data, &tds); err != nil { + return t, err + } + t = make([]types.Trade, len(tds)) + for i, td := range tds { + tt := &t[i] + tt.Exchange = types.ExchangeFTX + tt.Price = td.Price + tt.Quantity = td.Size + tt.QuoteQuantity = td.Size + tt.Symbol = r.Market + tt.Side = types.SideType(TrimUpperString(string(td.Side))) + tt.IsBuyer = true + tt.IsMaker = false + tt.Time = types.Time(td.Time) + } + return t, nil +} + +func (r websocketResponse) toOrderUpdateResponse() (orderUpdateResponse, error) { + 
if r.Channel != privateOrdersChannel { + return orderUpdateResponse{}, fmt.Errorf("type %s, channel %s: %w", r.Type, r.Channel, errUnsupportedConversion) + } + var o orderUpdateResponse + if err := json.Unmarshal(r.Data, &o.Data); err != nil { + return orderUpdateResponse{}, err + } + o.mandatoryFields = r.mandatoryFields + return o, nil +} + +type tradeUpdateResponse struct { + mandatoryFields + + Data ftxapi.Fill `json:"data"` +} + +func (r websocketResponse) toTradeUpdateResponse() (tradeUpdateResponse, error) { + if r.Channel != privateTradesChannel { + return tradeUpdateResponse{}, fmt.Errorf("type %s, channel %s: %w", r.Type, r.Channel, errUnsupportedConversion) + } + var t tradeUpdateResponse + if err := json.Unmarshal(r.Data, &t.Data); err != nil { + return tradeUpdateResponse{}, err + } + t.mandatoryFields = r.mandatoryFields + return t, nil +} + +/* + Private: + order: {"type": "subscribed", "channel": "orders"} + +Public + orderbook: {"type": "subscribed", "channel": "orderbook", "market": "BTC/USDT"} + +*/ +type subscribedResponse struct { + mandatoryFields + + Market string `json:"market"` +} + +func (s subscribedResponse) String() string { + return fmt.Sprintf("%s channel is subscribed", strings.TrimSpace(fmt.Sprintf("%s %s", s.Market, s.Channel))) +} + +// {"type": "subscribed", "channel": "orderbook", "market": "BTC/USDT"} +func (r websocketResponse) toSubscribedResponse() (subscribedResponse, error) { + if r.Type != subscribedRespType { + return subscribedResponse{}, fmt.Errorf("type %s, channel %s: %w", r.Type, r.Channel, errUnsupportedConversion) + } + + return subscribedResponse{ + mandatoryFields: r.mandatoryFields, + Market: r.Market, + }, nil +} + +// {"type": "error", "code": 400, "msg": "Already logged in"} +type errResponse struct { + Code int64 `json:"code"` + Message string `json:"msg"` +} + +func (e errResponse) String() string { + return fmt.Sprintf("%d: %s", e.Code, e.Message) +} + +func (r websocketResponse) toErrResponse() errResponse { + return errResponse{ + Code: r.Code, + Message: r.Message, + } +} + +// sample :{"bid": 49194.0, "ask": 49195.0, "bidSize": 0.0775, "askSize": 0.0247, "last": 49200.0, "time": 1640171788.9339821} +func (r websocketResponse) toBookTickerResponse() (bookTickerResponse, error) { + if r.Channel != bookTickerChannel { + return bookTickerResponse{}, fmt.Errorf("type %s, channel %s: %w", r.Type, r.Channel, errUnsupportedConversion) + } + + var o bookTickerResponse + if err := json.Unmarshal(r.Data, &o); err != nil { + return bookTickerResponse{}, err + } + + o.mandatoryFields = r.mandatoryFields + o.Market = r.Market + o.Timestamp = nanoToTime(o.Time) + + return o, nil +} + +func (r websocketResponse) toPublicOrderBookResponse() (orderBookResponse, error) { + if r.Channel != orderBookChannel { + return orderBookResponse{}, fmt.Errorf("type %s, channel %s: %w", r.Type, r.Channel, errUnsupportedConversion) + } + + var o orderBookResponse + if err := json.Unmarshal(r.Data, &o); err != nil { + return orderBookResponse{}, err + } + + o.mandatoryFields = r.mandatoryFields + o.Market = r.Market + o.Timestamp = nanoToTime(o.Time) + + return o, nil +} + +func nanoToTime(input float64) time.Time { + sec, dec := math.Modf(input) + return time.Unix(int64(sec), int64(dec*1e9)) +} + +type orderBookResponse struct { + mandatoryFields + + Market string `json:"market"` + + Action string `json:"action"` + + Time float64 `json:"time"` + + Timestamp time.Time + + Checksum uint32 `json:"checksum"` + + // best 100 orders. Ex. 
{[100,1], [50, 2]} + Bids [][]json.Number `json:"bids"` + + // best 100 orders. Ex. {[51, 1], [102, 3]} + Asks [][]json.Number `json:"asks"` +} + +type bookTickerResponse struct { + mandatoryFields + Market string `json:"market"` + Bid fixedpoint.Value `json:"bid"` + Ask fixedpoint.Value `json:"ask"` + BidSize fixedpoint.Value `json:"bidSize"` + AskSize fixedpoint.Value `json:"askSize"` + Last fixedpoint.Value `json:"last"` + Time float64 `json:"time"` + Timestamp time.Time +} + +// only 100 orders so we use linear search here +func (r *orderBookResponse) update(orderUpdates orderBookResponse) { + r.Checksum = orderUpdates.Checksum + r.updateBids(orderUpdates.Bids) + r.updateAsks(orderUpdates.Asks) +} + +func (r *orderBookResponse) updateAsks(asks [][]json.Number) { + higherPrice := func(dst, src float64) bool { + return dst < src + } + for _, o := range asks { + if remove := o[1] == "0"; remove { + r.Asks = removePrice(r.Asks, o[0]) + } else { + r.Asks = upsertPriceVolume(r.Asks, o, higherPrice) + } + } +} + +func (r *orderBookResponse) updateBids(bids [][]json.Number) { + lessPrice := func(dst, src float64) bool { + return dst > src + } + for _, o := range bids { + if remove := o[1] == "0"; remove { + r.Bids = removePrice(r.Bids, o[0]) + } else { + r.Bids = upsertPriceVolume(r.Bids, o, lessPrice) + } + } +} + +func upsertPriceVolume(dst [][]json.Number, src []json.Number, priceComparator func(dst float64, src float64) bool) [][]json.Number { + for i, pv := range dst { + dstPrice := pv[0] + srcPrice := src[0] + + // update volume + if dstPrice == srcPrice { + pv[1] = src[1] + return dst + } + + // The value must be a number which is verified by json.Unmarshal, so the err + // should never happen. + dstPriceNum, err := strconv.ParseFloat(string(dstPrice), 64) + if err != nil { + logger.WithError(err).Errorf("unexpected price %s", dstPrice) + continue + } + srcPriceNum, err := strconv.ParseFloat(string(srcPrice), 64) + if err != nil { + logger.WithError(err).Errorf("unexpected price updates %s", srcPrice) + continue + } + + if !priceComparator(dstPriceNum, srcPriceNum) { + return insertAt(dst, i, src) + } + } + + return append(dst, src) +} + +func insertAt(dst [][]json.Number, id int, pv []json.Number) (result [][]json.Number) { + result = append(result, dst[:id]...) + result = append(result, pv) + result = append(result, dst[id:]...) + return +} + +func removePrice(dst [][]json.Number, price json.Number) [][]json.Number { + for i, pv := range dst { + if pv[0] == price { + return append(dst[:i], dst[i+1:]...) + } + } + + return dst +} + +func (r orderBookResponse) verifyChecksum() error { + if crc32Val := crc32.ChecksumIEEE([]byte(checksumString(r.Bids, r.Asks))); crc32Val != r.Checksum { + return fmt.Errorf("expected checksum %d, actual checksum %d: %w", r.Checksum, crc32Val, errUnmatchedChecksum) + } + return nil +} + +// :::... 
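+// checksumString interleaves the bid and ask levels, best levels first, into a single
+// "<bid_price>:<bid_size>:<ask_price>:<ask_size>:..." string; verifyChecksum above CRC32s
+// this string and compares it against the checksum field sent in the orderbook payload.
+// For example, bids [[5000.5, 10], [4995.0, 5]] with asks [[5001.0, 6]] serialize to
+// "5000.5:10:5001.0:6:4995.0:5".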
+func checksumString(bids, asks [][]json.Number) string { + sb := strings.Builder{} + appendNumber := func(pv []json.Number) { + if sb.Len() != 0 { + sb.WriteString(":") + } + sb.WriteString(string(pv[0])) + sb.WriteString(":") + sb.WriteString(string(pv[1])) + } + + bidsLen := len(bids) + asksLen := len(asks) + for i := 0; i < bidsLen || i < asksLen; i++ { + if i < bidsLen { + appendNumber(bids[i]) + } + if i < asksLen { + appendNumber(asks[i]) + } + } + return sb.String() +} + +var errUnmatchedChecksum = fmt.Errorf("unmatched checksum") + +func toGlobalOrderBook(r orderBookResponse) (types.SliceOrderBook, error) { + bids, err := toPriceVolumeSlice(r.Bids) + if err != nil { + return types.SliceOrderBook{}, fmt.Errorf("can't convert bids to priceVolumeSlice: %w", err) + } + asks, err := toPriceVolumeSlice(r.Asks) + if err != nil { + return types.SliceOrderBook{}, fmt.Errorf("can't convert asks to priceVolumeSlice: %w", err) + } + return types.SliceOrderBook{ + // ex. BTC/USDT + Symbol: toGlobalSymbol(strings.ToUpper(r.Market)), + Bids: bids, + Asks: asks, + }, nil +} + +func toGlobalBookTicker(r bookTickerResponse) (types.BookTicker, error) { + return types.BookTicker{ + // ex. BTC/USDT + Symbol: toGlobalSymbol(strings.ToUpper(r.Market)), + // Time: r.Timestamp, + Buy: r.Bid, + BuySize: r.BidSize, + Sell: r.Ask, + SellSize: r.AskSize, + // Last: r.Last, + }, nil +} + +func toPriceVolumeSlice(orders [][]json.Number) (types.PriceVolumeSlice, error) { + var pv types.PriceVolumeSlice + for _, o := range orders { + p, err := fixedpoint.NewFromString(string(o[0])) + if err != nil { + return nil, fmt.Errorf("can't convert price %+v to fixedpoint: %w", o[0], err) + } + v, err := fixedpoint.NewFromString(string(o[1])) + if err != nil { + return nil, fmt.Errorf("can't convert volume %+v to fixedpoint: %w", o[0], err) + } + pv = append(pv, types.PriceVolume{Price: p, Volume: v}) + } + return pv, nil +} diff --git a/pkg/exchange/ftx/websocket_messages_test.go b/pkg/exchange/ftx/websocket_messages_test.go new file mode 100644 index 0000000000..5a9635e5a1 --- /dev/null +++ b/pkg/exchange/ftx/websocket_messages_test.go @@ -0,0 +1,249 @@ +package ftx + +import ( + "encoding/json" + "io/ioutil" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/exchange/ftx/ftxapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func Test_rawResponse_toSubscribedResp(t *testing.T) { + input := `{"type": "subscribed", "channel": "orderbook", "market": "BTC/USDT"}` + var m websocketResponse + assert.NoError(t, json.Unmarshal([]byte(input), &m)) + r, err := m.toSubscribedResponse() + assert.NoError(t, err) + assert.Equal(t, subscribedResponse{ + mandatoryFields: mandatoryFields{ + Channel: orderBookChannel, + Type: subscribedRespType, + }, + Market: "BTC/USDT", + }, r) +} + +func Test_websocketResponse_toPublicOrderBookResponse(t *testing.T) { + f, err := ioutil.ReadFile("./orderbook_snapshot.json") + assert.NoError(t, err) + var m websocketResponse + assert.NoError(t, json.Unmarshal(f, &m)) + r, err := m.toPublicOrderBookResponse() + assert.NoError(t, err) + assert.Equal(t, partialRespType, r.Type) + assert.Equal(t, orderBookChannel, r.Channel) + assert.Equal(t, "BTC/USDT", r.Market) + assert.Equal(t, int64(1614520368), r.Timestamp.Unix()) + assert.Equal(t, uint32(2150525410), r.Checksum) + assert.Len(t, r.Bids, 100) + assert.Equal(t, []json.Number{"44555.0", "3.3968"}, r.Bids[0]) + assert.Equal(t, []json.Number{"44554.0", "0.0561"}, 
r.Bids[1]) + assert.Len(t, r.Asks, 100) + assert.Equal(t, []json.Number{"44574.0", "0.4591"}, r.Asks[0]) + assert.Equal(t, []json.Number{"44579.0", "0.15"}, r.Asks[1]) +} + +func Test_orderBookResponse_toGlobalOrderBook(t *testing.T) { + f, err := ioutil.ReadFile("./orderbook_snapshot.json") + assert.NoError(t, err) + var m websocketResponse + assert.NoError(t, json.Unmarshal(f, &m)) + r, err := m.toPublicOrderBookResponse() + assert.NoError(t, err) + + b, err := toGlobalOrderBook(r) + assert.NoError(t, err) + assert.Equal(t, "BTCUSDT", b.Symbol) + isValid, err := b.IsValid() + assert.True(t, isValid) + assert.NoError(t, err) + + assert.Len(t, b.Bids, 100) + assert.Equal(t, types.PriceVolume{ + Price: fixedpoint.MustNewFromString("44555.0"), + Volume: fixedpoint.MustNewFromString("3.3968"), + }, b.Bids[0]) + assert.Equal(t, types.PriceVolume{ + Price: fixedpoint.MustNewFromString("44222.0"), + Volume: fixedpoint.MustNewFromString("0.0002"), + }, b.Bids[99]) + + assert.Len(t, b.Asks, 100) + assert.Equal(t, types.PriceVolume{ + Price: fixedpoint.MustNewFromString("44574.0"), + Volume: fixedpoint.MustNewFromString("0.4591"), + }, b.Asks[0]) + assert.Equal(t, types.PriceVolume{ + Price: fixedpoint.MustNewFromString("45010.0"), + Volume: fixedpoint.MustNewFromString("0.0003"), + }, b.Asks[99]) + +} + +func Test_checksumString(t *testing.T) { + type args struct { + bids [][]json.Number + asks [][]json.Number + } + tests := []struct { + name string + args args + want string + }{ + { + name: "more bids", + args: args{ + bids: [][]json.Number{{"5000.5", "10"}, {"4995.0", "5"}}, + asks: [][]json.Number{{"5001.0", "6"}}, + }, + want: "5000.5:10:5001.0:6:4995.0:5", + }, + { + name: "lengths of bids and asks are the same", + args: args{ + bids: [][]json.Number{{"5000.5", "10"}, {"4995.0", "5"}}, + asks: [][]json.Number{{"5001.0", "6"}, {"5002.0", "7"}}, + }, + want: "5000.5:10:5001.0:6:4995.0:5:5002.0:7", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := checksumString(tt.args.bids, tt.args.asks); got != tt.want { + t.Errorf("checksumString() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_orderBookResponse_verifyChecksum(t *testing.T) { + for _, file := range []string{"./orderbook_snapshot.json"} { + f, err := ioutil.ReadFile(file) + assert.NoError(t, err) + var m websocketResponse + assert.NoError(t, json.Unmarshal(f, &m)) + r, err := m.toPublicOrderBookResponse() + assert.NoError(t, err) + assert.NoError(t, r.verifyChecksum(), "filename: "+file) + } +} + +func Test_removePrice(t *testing.T) { + pairs := [][]json.Number{{"123.99", "2.0"}, {"2234.12", "3.1"}} + assert.Equal(t, pairs, removePrice(pairs, "99333")) + + pairs = removePrice(pairs, "2234.12") + assert.Equal(t, [][]json.Number{{"123.99", "2.0"}}, pairs) + assert.Equal(t, [][]json.Number{}, removePrice(pairs, "123.99")) +} + +func Test_orderBookResponse_update(t *testing.T) { + ob := &orderBookResponse{Bids: nil, Asks: nil} + + ob.update(orderBookResponse{ + Bids: [][]json.Number{{"1.0", "0"}, {"10.0", "1"}, {"11.0", "1"}}, + Asks: [][]json.Number{{"1.0", "1"}}, + }) + assert.Equal(t, [][]json.Number{{"11.0", "1"}, {"10.0", "1"}}, ob.Bids) + assert.Equal(t, [][]json.Number{{"1.0", "1"}}, ob.Asks) + ob.update(orderBookResponse{ + Bids: [][]json.Number{{"9.0", "1"}, {"12.0", "1"}, {"10.5", "1"}}, + Asks: [][]json.Number{{"1.0", "0"}}, + }) + assert.Equal(t, [][]json.Number{{"12.0", "1"}, {"11.0", "1"}, {"10.5", "1"}, {"10.0", "1"}, {"9.0", "1"}}, ob.Bids) + assert.Equal(t, [][]json.Number{}, 
ob.Asks) + + // remove them + ob.update(orderBookResponse{ + Bids: [][]json.Number{{"9.0", "0"}, {"12.0", "0"}, {"10.5", "0"}}, + Asks: [][]json.Number{{"9.0", "1"}, {"12.0", "1"}, {"10.5", "1"}}, + }) + assert.Equal(t, [][]json.Number{{"11.0", "1"}, {"10.0", "1"}}, ob.Bids) + assert.Equal(t, [][]json.Number{{"9.0", "1"}, {"10.5", "1"}, {"12.0", "1"}}, ob.Asks) +} + +func Test_insertAt(t *testing.T) { + r := insertAt([][]json.Number{{"1.2", "2"}, {"1.4", "2"}}, 1, []json.Number{"1.3", "2"}) + assert.Equal(t, [][]json.Number{{"1.2", "2"}, {"1.3", "2"}, {"1.4", "2"}}, r) + + r = insertAt([][]json.Number{{"1.2", "2"}, {"1.4", "2"}}, 0, []json.Number{"1.1", "2"}) + assert.Equal(t, [][]json.Number{{"1.1", "2"}, {"1.2", "2"}, {"1.4", "2"}}, r) + + r = insertAt([][]json.Number{{"1.2", "2"}, {"1.4", "2"}}, 2, []json.Number{"1.5", "2"}) + assert.Equal(t, [][]json.Number{{"1.2", "2"}, {"1.4", "2"}, {"1.5", "2"}}, r) +} + +func Test_newLoginRequest(t *testing.T) { + // From API doc: https://docs.ftx.com/?javascript#authentication-2 + r := newLoginRequest("", "Y2QTHI23f23f23jfjas23f23To0RfUwX3H42fvN-", time.Unix(0, 1557246346499*int64(time.Millisecond)), "") + // pragma: allowlist nextline secret + expectedSignature := "d10b5a67a1a941ae9463a60b285ae845cdeac1b11edc7da9977bef0228b96de9" + assert.Equal(t, expectedSignature, r.Login.Signature) + jsonStr, err := json.Marshal(r) + assert.NoError(t, err) + assert.True(t, strings.Contains(string(jsonStr), expectedSignature)) +} + +func Test_websocketResponse_toOrderUpdateResponse(t *testing.T) { + input := []byte(` +{ + "channel": "orders", + "type": "update", + "data": { + "id": 12345, + "clientId": "test-client-id", + "market": "SOL/USD", + "type": "limit", + "side": "buy", + "price": 0.5, + "size": 100.0, + "status": "closed", + "filledSize": 0.0, + "remainingSize": 0.0, + "reduceOnly": false, + "liquidation": false, + "avgFillPrice": null, + "postOnly": false, + "ioc": false, + "createdAt": "2021-03-27T11:00:36.418674+00:00" + } +} +`) + + var raw websocketResponse + assert.NoError(t, json.Unmarshal(input, &raw)) + + r, err := raw.toOrderUpdateResponse() + assert.NoError(t, err) + + assert.Equal(t, orderUpdateResponse{ + mandatoryFields: mandatoryFields{ + Channel: privateOrdersChannel, + Type: updateRespType, + }, + Data: ftxapi.Order{ + Id: 12345, + ClientId: "test-client-id", + Market: "SOL/USD", + Type: "limit", + Side: "buy", + Price: fixedpoint.NewFromFloat(0.5), + Size: fixedpoint.NewFromInt(100), + Status: "closed", + FilledSize: fixedpoint.Zero, + RemainingSize: fixedpoint.Zero, + ReduceOnly: false, + AvgFillPrice: fixedpoint.Zero, + PostOnly: false, + Ioc: false, + CreatedAt: mustParseDatetime("2021-03-27T11:00:36.418674+00:00"), + Future: "", + }, + }, r) +} diff --git a/pkg/exchange/kucoin/convert.go b/pkg/exchange/kucoin/convert.go new file mode 100644 index 0000000000..d86f84db95 --- /dev/null +++ b/pkg/exchange/kucoin/convert.go @@ -0,0 +1,247 @@ +package kucoin + +import ( + "fmt" + "hash/fnv" + "math" + "strings" + "time" + + "github.com/c9s/bbgo/pkg/exchange/kucoin/kucoinapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func toGlobalBalanceMap(accounts []kucoinapi.Account) types.BalanceMap { + balances := types.BalanceMap{} + + // for now, we only return the trading account + for _, account := range accounts { + switch account.Type { + case kucoinapi.AccountTypeTrade: + balances[account.Currency] = types.Balance{ + Currency: account.Currency, + Available: account.Available, + Locked: account.Holds, + } + 
} + } + + return balances +} + +func toGlobalSymbol(symbol string) string { + return strings.ReplaceAll(symbol, "-", "") +} + +func toGlobalMarket(m kucoinapi.Symbol) types.Market { + symbol := toGlobalSymbol(m.Symbol) + return types.Market{ + Symbol: symbol, + LocalSymbol: m.Symbol, + PricePrecision: int(math.Log10(m.PriceIncrement.Float64())), // convert 0.0001 to 4 + VolumePrecision: int(math.Log10(m.BaseIncrement.Float64())), + QuoteCurrency: m.QuoteCurrency, + BaseCurrency: m.BaseCurrency, + MinNotional: m.QuoteMinSize, + MinAmount: m.QuoteMinSize, + MinQuantity: m.BaseMinSize, + MaxQuantity: fixedpoint.Zero, // not used + StepSize: m.BaseIncrement, + + MinPrice: fixedpoint.Zero, // not used + MaxPrice: fixedpoint.Zero, // not used + TickSize: m.PriceIncrement, + } +} + +func toGlobalTicker(s kucoinapi.Ticker24H) types.Ticker { + return types.Ticker{ + Time: s.Time.Time(), + Volume: s.Volume, + Last: s.Last, + Open: s.Last.Sub(s.ChangePrice), + High: s.High, + Low: s.Low, + Buy: s.Buy, + Sell: s.Sell, + } +} + +func toLocalInterval(i types.Interval) string { + switch i { + case types.Interval1m: + return "1min" + + case types.Interval5m: + return "5min" + + case types.Interval15m: + return "15min" + + case types.Interval30m: + return "30min" + + case types.Interval1h: + return "1hour" + + case types.Interval2h: + return "2hour" + + case types.Interval4h: + return "4hour" + + case types.Interval6h: + return "6hour" + + case types.Interval12h: + return "12hour" + + case types.Interval1d: + return "1day" + + } + + return "1hour" +} + +// convertSubscriptions global subscription to local websocket command +func convertSubscriptions(ss []types.Subscription) ([]WebSocketCommand, error) { + var id = time.Now().UnixNano() / int64(time.Millisecond) + var cmds []WebSocketCommand + for _, s := range ss { + id++ + + var subscribeTopic string + switch s.Channel { + case types.BookChannel: + // see https://docs.kucoin.com/#level-2-market-data + subscribeTopic = "/market/level2" + ":" + toLocalSymbol(s.Symbol) + + case types.KLineChannel: + subscribeTopic = "/market/candles" + ":" + toLocalSymbol(s.Symbol) + "_" + toLocalInterval(types.Interval(s.Options.Interval)) + + default: + return nil, fmt.Errorf("websocket channel %s is not supported by kucoin", s.Channel) + } + + cmds = append(cmds, WebSocketCommand{ + Id: id, + Type: WebSocketMessageTypeSubscribe, + Topic: subscribeTopic, + PrivateChannel: false, + Response: true, + }) + } + + return cmds, nil +} + +func hashStringID(s string) uint64 { + h := fnv.New64a() + h.Write([]byte(s)) + return h.Sum64() +} + +func toGlobalOrderStatus(o kucoinapi.Order) types.OrderStatus { + var status types.OrderStatus + if o.IsActive { + status = types.OrderStatusNew + if o.DealSize.Sign() > 0 { + status = types.OrderStatusPartiallyFilled + } + } else if o.CancelExist { + status = types.OrderStatusCanceled + } else { + status = types.OrderStatusFilled + } + + return status +} + +func toGlobalSide(s string) types.SideType { + switch s { + case "buy": + return types.SideTypeBuy + case "sell": + return types.SideTypeSell + } + + return types.SideTypeSelf +} + +func toGlobalOrderType(s string) types.OrderType { + switch s { + case "limit": + return types.OrderTypeLimit + + case "stop_limit": + return types.OrderTypeStopLimit + + case "market": + return types.OrderTypeMarket + + case "stop_market": + return types.OrderTypeStopMarket + + } + + return "" +} + +func toLocalSide(side types.SideType) kucoinapi.SideType { + switch side { + case types.SideTypeBuy: + return 
kucoinapi.SideTypeBuy + + case types.SideTypeSell: + return kucoinapi.SideTypeSell + + } + + return "" +} + +func toGlobalOrder(o kucoinapi.Order) types.Order { + var status = toGlobalOrderStatus(o) + var order = types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: o.ClientOrderID, + Symbol: toGlobalSymbol(o.Symbol), + Side: toGlobalSide(o.Side), + Type: toGlobalOrderType(o.Type), + Quantity: o.Size, + Price: o.Price, + StopPrice: o.StopPrice, + TimeInForce: types.TimeInForce(o.TimeInForce), + }, + Exchange: types.ExchangeKucoin, + OrderID: hashStringID(o.ID), + UUID: o.ID, + Status: status, + ExecutedQuantity: o.DealSize, + IsWorking: o.IsActive, + CreationTime: types.Time(o.CreatedAt.Time()), + UpdateTime: types.Time(o.CreatedAt.Time()), // kucoin does not response updated time + } + return order +} + +func toGlobalTrade(fill kucoinapi.Fill) types.Trade { + var trade = types.Trade{ + ID: hashStringID(fill.TradeId), + OrderID: hashStringID(fill.OrderId), + Exchange: types.ExchangeKucoin, + Price: fill.Price, + Quantity: fill.Size, + QuoteQuantity: fill.Funds, + Symbol: toGlobalSymbol(fill.Symbol), + Side: toGlobalSide(string(fill.Side)), + IsBuyer: fill.Side == kucoinapi.SideTypeBuy, + IsMaker: fill.Liquidity == kucoinapi.LiquidityTypeMaker, + Time: types.Time(fill.CreatedAt.Time()), + Fee: fill.Fee, + FeeCurrency: toGlobalSymbol(fill.FeeCurrency), + } + return trade +} diff --git a/pkg/exchange/kucoin/exchange.go b/pkg/exchange/kucoin/exchange.go new file mode 100644 index 0000000000..28da3a04ed --- /dev/null +++ b/pkg/exchange/kucoin/exchange.go @@ -0,0 +1,449 @@ +package kucoin + +import ( + "context" + "fmt" + "sort" + "strconv" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "go.uber.org/multierr" + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/exchange/kucoin/kucoinapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +var marketDataLimiter = rate.NewLimiter(rate.Every(6*time.Second), 1) +var queryTradeLimiter = rate.NewLimiter(rate.Every(6*time.Second), 1) +var queryOrderLimiter = rate.NewLimiter(rate.Every(6*time.Second), 1) + +var ErrMissingSequence = errors.New("sequence is missing") + +// OKB is the platform currency of OKEx, pre-allocate static string here +const KCS = "KCS" + +var log = logrus.WithFields(logrus.Fields{ + "exchange": "kucoin", +}) + +type Exchange struct { + key, secret, passphrase string + client *kucoinapi.RestClient +} + +func New(key, secret, passphrase string) *Exchange { + client := kucoinapi.NewClient() + + // for public access mode + if len(key) > 0 && len(secret) > 0 && len(passphrase) > 0 { + client.Auth(key, secret, passphrase) + } + + return &Exchange{ + key: key, + // pragma: allowlist nextline secret + secret: secret, + passphrase: passphrase, + client: client, + } +} + +func (e *Exchange) Name() types.ExchangeName { + return types.ExchangeKucoin +} + +func (e *Exchange) PlatformFeeCurrency() string { + return KCS +} + +func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { + req := e.client.AccountService.NewListAccountsRequest() + accounts, err := req.Do(ctx) + if err != nil { + return nil, err + } + + // for now, we only return the trading account + a := types.NewAccount() + balances := toGlobalBalanceMap(accounts) + a.UpdateBalances(balances) + return a, nil +} + +func (e *Exchange) QueryAccountBalances(ctx context.Context) (types.BalanceMap, error) { + req := e.client.AccountService.NewListAccountsRequest() + accounts, err := req.Do(ctx) + if err != 
nil { + return nil, err + } + + return toGlobalBalanceMap(accounts), nil +} + +func (e *Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { + markets, err := e.client.MarketDataService.ListSymbols() + if err != nil { + return nil, err + } + + marketMap := types.MarketMap{} + for _, s := range markets { + market := toGlobalMarket(s) + marketMap.Add(market) + } + + return marketMap, nil +} + +func (e *Exchange) QueryTicker(ctx context.Context, symbol string) (*types.Ticker, error) { + s, err := e.client.MarketDataService.GetTicker24HStat(symbol) + if err != nil { + return nil, err + } + + ticker := toGlobalTicker(*s) + return &ticker, nil +} + +func (e *Exchange) QueryTickers(ctx context.Context, symbols ...string) (map[string]types.Ticker, error) { + tickers := map[string]types.Ticker{} + if len(symbols) > 0 { + for _, s := range symbols { + t, err := e.QueryTicker(ctx, s) + if err != nil { + return nil, err + } + + tickers[s] = *t + } + + return tickers, nil + } + + allTickers, err := e.client.MarketDataService.ListTickers() + if err != nil { + return nil, err + } + + for _, s := range allTickers.Ticker { + tickers[s.Symbol] = toGlobalTicker(s) + } + + return tickers, nil +} + +// From the doc +// Type of candlestick patterns: 1min, 3min, 5min, 15min, 30min, 1hour, 2hour, 4hour, 6hour, 8hour, 12hour, 1day, 1week +var supportedIntervals = map[types.Interval]int{ + types.Interval1m: 60, + types.Interval5m: 60 * 5, + types.Interval15m: 60 * 15, + types.Interval30m: 60 * 30, + types.Interval1h: 60 * 60, + types.Interval2h: 60 * 60 * 2, + types.Interval4h: 60 * 60 * 4, + types.Interval6h: 60 * 60 * 6, + // types.Interval8h: 60 * 60 * 8, + types.Interval12h: 60 * 60 * 12, +} + +func (e *Exchange) SupportedInterval() map[types.Interval]int { + return supportedIntervals +} + +func (e *Exchange) IsSupportedInterval(interval types.Interval) bool { + _, ok := supportedIntervals[interval] + return ok +} + +func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { + if err := marketDataLimiter.Wait(ctx); err != nil { + return nil, err + } + + req := e.client.MarketDataService.NewGetKLinesRequest() + req.Symbol(toLocalSymbol(symbol)) + req.Interval(toLocalInterval(interval)) + if options.StartTime != nil { + req.StartAt(*options.StartTime) + // For each query, the system would return at most **1500** pieces of data. To obtain more data, please page the data by time. 
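+ // cap the request window at startTime + 1500 intervals so a single call stays within
+ // that limit; to cover a longer range, page by time in the caller, for example
+ // (illustrative sketch only, not part of this change):
+ //
+ //	for t := start; t.Before(end); t = t.Add(1500 * interval.Duration()) {
+ //		klines, err := e.QueryKLines(ctx, symbol, interval, types.KLineQueryOptions{StartTime: &t})
+ //		// handle klines and err, then continue with the next window
+ //	}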
+ req.EndAt(options.StartTime.Add(1500 * interval.Duration())) + } else if options.EndTime != nil { + req.EndAt(*options.EndTime) + } + + ks, err := req.Do(ctx) + if err != nil { + return nil, err + } + + var klines []types.KLine + for _, k := range ks { + gi := toGlobalInterval(k.Interval) + klines = append(klines, types.KLine{ + Exchange: types.ExchangeKucoin, + Symbol: toGlobalSymbol(k.Symbol), + StartTime: types.Time(k.StartTime), + EndTime: types.Time(k.StartTime.Add(gi.Duration() - time.Millisecond)), + Interval: gi, + Open: k.Open, + Close: k.Close, + High: k.High, + Low: k.Low, + Volume: k.Volume, + QuoteVolume: k.QuoteVolume, + Closed: true, + }) + } + + sort.Slice(klines, func(i, j int) bool { + return klines[i].StartTime.Before(klines[j].StartTime.Time()) + }) + + return klines, nil +} + +func (e *Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (createdOrders types.OrderSlice, err error) { + for _, order := range orders { + req := e.client.TradeService.NewPlaceOrderRequest() + req.Symbol(toLocalSymbol(order.Symbol)) + req.Side(toLocalSide(order.Side)) + + if order.ClientOrderID != "" { + req.ClientOrderID(order.ClientOrderID) + } + + if order.Market.Symbol != "" { + req.Size(order.Market.FormatQuantity(order.Quantity)) + } else { + // TODO: report error? + req.Size(order.Quantity.FormatString(8)) + } + + // set price field for limit orders + switch order.Type { + case types.OrderTypeStopLimit, types.OrderTypeLimit, types.OrderTypeLimitMaker: + if order.Market.Symbol != "" { + req.Price(order.Market.FormatPrice(order.Price)) + } else { + // TODO: report error? + req.Price(order.Price.FormatString(8)) + } + } + + if order.Type == types.OrderTypeLimitMaker { + req.PostOnly(true) + } + + switch order.TimeInForce { + case "FOK": + req.TimeInForce(kucoinapi.TimeInForceFOK) + case "IOC": + req.TimeInForce(kucoinapi.TimeInForceIOC) + default: + // default to GTC + req.TimeInForce(kucoinapi.TimeInForceGTC) + } + + switch order.Type { + case types.OrderTypeStopLimit: + req.OrderType(kucoinapi.OrderTypeStopLimit) + + case types.OrderTypeLimit, types.OrderTypeLimitMaker: + req.OrderType(kucoinapi.OrderTypeLimit) + + case types.OrderTypeMarket: + req.OrderType(kucoinapi.OrderTypeMarket) + } + + orderResponse, err := req.Do(ctx) + if err != nil { + return createdOrders, err + } + + createdOrders = append(createdOrders, types.Order{ + SubmitOrder: order, + Exchange: types.ExchangeKucoin, + OrderID: hashStringID(orderResponse.OrderID), + UUID: orderResponse.OrderID, + Status: types.OrderStatusNew, + ExecutedQuantity: fixedpoint.Zero, + IsWorking: true, + CreationTime: types.Time(time.Now()), + UpdateTime: types.Time(time.Now()), + }) + } + + return createdOrders, err +} + +// QueryOpenOrders +/* +Documentation from the Kucoin API page + +Any order on the exchange order book is in active status. +Orders removed from the order book will be marked with done status. +After an order becomes done, there may be a few milliseconds latency before it’s fully settled. + +You can check the orders in any status. +If the status parameter is not specified, orders of done status will be returned by default. + +When you query orders in active status, there is no time limit. +However, when you query orders in done status, the start and end time range cannot exceed 7* 24 hours. +An error will occur if the specified time window exceeds the range. + +If you specify the end time only, the system will automatically calculate the start time as end time minus 7*24 hours, and vice versa. 
+ +The history for cancelled orders is only kept for one month. +You will not be able to query for cancelled orders that have happened more than a month ago. +*/ +func (e *Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders []types.Order, err error) { + req := e.client.TradeService.NewListOrdersRequest() + req.Symbol(toLocalSymbol(symbol)) + req.Status("active") + orderList, err := req.Do(ctx) + if err != nil { + return nil, err + } + + // TODO: support pagination (right now we can only get 50 items from the first page) + for _, o := range orderList.Items { + order := toGlobalOrder(o) + orders = append(orders, order) + } + + return orders, err +} + +func (e *Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) (orders []types.Order, err error) { + req := e.client.TradeService.NewListOrdersRequest() + req.Symbol(toLocalSymbol(symbol)) + req.Status("done") + req.StartAt(since) + + // kucoin: + // When you query orders in active status, there is no time limit. + // However, when you query orders in done status, the start and end time range cannot exceed 7* 24 hours. + // An error will occur if the specified time window exceeds the range. + // If you specify the end time only, the system will automatically calculate the start time as end time minus 7*24 hours, and vice versa. + if until.Sub(since) < 7*24*time.Hour { + req.EndAt(until) + } + + if err := queryOrderLimiter.Wait(ctx); err != nil { + return nil, err + } + + orderList, err := req.Do(ctx) + if err != nil { + return orders, err + } + + for _, o := range orderList.Items { + order := toGlobalOrder(o) + orders = append(orders, order) + } + + return orders, err +} + +var launchDate = time.Date(2017, 9, 0, 0, 0, 0, 0, time.UTC) + +func (e *Exchange) QueryTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) (trades []types.Trade, err error) { + req := e.client.TradeService.NewGetFillsRequest() + req.Symbol(toLocalSymbol(symbol)) + + // we always sync trades in the ascending order, and kucoin does not support last trade ID query + // hence we need to set the start time here + if options.StartTime != nil && options.StartTime.Before(launchDate) { + // copy the time data object + t := launchDate + options.StartTime = &t + } + + if options.StartTime != nil && options.EndTime != nil { + req.StartAt(*options.StartTime) + + if options.EndTime.Sub(*options.StartTime) < 7*24*time.Hour { + req.EndAt(*options.EndTime) + } + } else if options.StartTime != nil { + req.StartAt(*options.StartTime) + } else if options.EndTime != nil { + req.EndAt(*options.EndTime) + } + + if err := queryTradeLimiter.Wait(ctx); err != nil { + return trades, err + } + + response, err := req.Do(ctx) + if err != nil { + return trades, err + } + + for _, fill := range response.Items { + trade := toGlobalTrade(fill) + trades = append(trades, trade) + } + + return trades, nil +} + +func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) (errs error) { + for _, o := range orders { + req := e.client.TradeService.NewCancelOrderRequest() + + if o.UUID != "" { + req.OrderID(o.UUID) + } else if o.ClientOrderID != "" { + req.ClientOrderID(o.ClientOrderID) + } else { + errs = multierr.Append( + errs, + fmt.Errorf("the order uuid or client order id is empty, order: %#v", o), + ) + continue + } + + response, err := req.Do(ctx) + if err != nil { + errs = multierr.Append(errs, err) + continue + } + + log.Infof("cancelled orders: %v", response.CancelledOrderIDs) + } + + return 
errors.Wrap(errs, "order cancel error") +} + +func (e *Exchange) NewStream() types.Stream { + return NewStream(e.client, e) +} + +func (e *Exchange) QueryDepth(ctx context.Context, symbol string) (types.SliceOrderBook, int64, error) { + orderBook, err := e.client.MarketDataService.GetOrderBook(toLocalSymbol(symbol), 100) + if err != nil { + return types.SliceOrderBook{}, 0, err + } + + if len(orderBook.Sequence) == 0 { + return types.SliceOrderBook{}, 0, ErrMissingSequence + } + + sequence, err := strconv.ParseInt(orderBook.Sequence, 10, 64) + if err != nil { + return types.SliceOrderBook{}, 0, err + } + + return types.SliceOrderBook{ + Symbol: toGlobalSymbol(symbol), + Bids: orderBook.Bids, + Asks: orderBook.Asks, + }, sequence, nil +} diff --git a/pkg/exchange/kucoin/generate_symbol_map.go b/pkg/exchange/kucoin/generate_symbol_map.go new file mode 100644 index 0000000000..249a5ec17b --- /dev/null +++ b/pkg/exchange/kucoin/generate_symbol_map.go @@ -0,0 +1,77 @@ +//go:build ignore +// +build ignore + +package main + +import ( + "encoding/json" + "log" + "net/http" + "os" + "strings" + "text/template" + + "github.com/c9s/bbgo/pkg/exchange/kucoin/kucoinapi" +) + +var packageTemplate = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT. +package kucoin + +var symbolMap = map[string]string{ +{{- range $k, $v := . }} + {{ printf "%q" $k }}: {{ printf "%q" $v }}, +{{- end }} +} + +func toLocalSymbol(symbol string) string { + s, ok := symbolMap[symbol] + if ok { + return s + } + + return symbol +} +`)) + +type Market struct { + Symbol string `json:"symbol"` +} + +type ApiResponse struct { + Data []Market `json:"data"` +} + +func main() { + + const apiUrl = kucoinapi.RestBaseURL + "/v1/symbols" + + resp, err := http.Get(apiUrl) + if err != nil { + log.Fatal(err) + } + + defer resp.Body.Close() + + r := &ApiResponse{} + if err := json.NewDecoder(resp.Body).Decode(r); err != nil { + log.Fatal(err) + } + + var data = map[string]string{} + for _, m := range r.Data { + key := strings.ReplaceAll(strings.ToUpper(strings.TrimSpace(m.Symbol)), "-", "") + data[key] = m.Symbol + } + + f, err := os.Create("symbols.go") + if err != nil { + log.Fatal(err) + } + + defer f.Close() + + err = packageTemplate.Execute(f, data) + if err != nil { + log.Fatal(err) + } +} diff --git a/pkg/exchange/kucoin/kucoinapi/account.go b/pkg/exchange/kucoin/kucoinapi/account.go new file mode 100644 index 0000000000..81757093df --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/account.go @@ -0,0 +1,58 @@ +package kucoinapi + +//go:generate -command GetRequest requestgen -method GET -responseType .APIResponse -responseDataField Data +//go:generate -command PostRequest requestgen -method POST -responseType .APIResponse -responseDataField Data + +import ( + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type AccountService struct { + client *RestClient +} + +func (s *AccountService) NewListSubAccountsRequest() *ListSubAccountsRequest { + return &ListSubAccountsRequest{client: s.client} +} + +func (s *AccountService) NewListAccountsRequest() *ListAccountsRequest { + return &ListAccountsRequest{client: s.client} +} + +func (s *AccountService) NewGetAccountRequest(accountID string) *GetAccountRequest { + return &GetAccountRequest{client: s.client, accountID: accountID} +} + +type SubAccount struct { + UserID string `json:"userId"` + Name string `json:"subName"` + Type string `json:"type"` + Remark string `json:"remarks"` +} + +//go:generate GetRequest -url "/api/v1/sub/user" 
-type ListSubAccountsRequest -responseDataType []SubAccount +type ListSubAccountsRequest struct { + client requestgen.AuthenticatedAPIClient +} + +type Account struct { + ID string `json:"id"` + Currency string `json:"currency"` + Type AccountType `json:"type"` + Balance fixedpoint.Value `json:"balance"` + Available fixedpoint.Value `json:"available"` + Holds fixedpoint.Value `json:"holds"` +} + +//go:generate GetRequest -url "/api/v1/accounts" -type ListAccountsRequest -responseDataType []Account +type ListAccountsRequest struct { + client requestgen.AuthenticatedAPIClient +} + +//go:generate GetRequest -url "/api/v1/accounts/:accountID" -type GetAccountRequest -responseDataType .Account +type GetAccountRequest struct { + client requestgen.AuthenticatedAPIClient + accountID string `param:"accountID,slug"` +} diff --git a/pkg/exchange/kucoin/kucoinapi/bullet.go b/pkg/exchange/kucoin/kucoinapi/bullet.go new file mode 100644 index 0000000000..0226fad9bc --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/bullet.go @@ -0,0 +1,67 @@ +package kucoinapi + +import ( + "net/url" + "time" + + "github.com/c9s/requestgen" + "github.com/pkg/errors" +) + +type BulletService struct { + client *RestClient +} + +func (s *BulletService) NewGetPublicBulletRequest() *GetPublicBulletRequest { + return &GetPublicBulletRequest{client: s.client} +} + +func (s *BulletService) NewGetPrivateBulletRequest() *GetPrivateBulletRequest { + return &GetPrivateBulletRequest{client: s.client} +} + +type Bullet struct { + InstanceServers []struct { + Endpoint string `json:"endpoint"` + Protocol string `json:"protocol"` + Encrypt bool `json:"encrypt"` + PingInterval int `json:"pingInterval"` + PingTimeout int `json:"pingTimeout"` + } `json:"instanceServers"` + Token string `json:"token"` +} + +func (b *Bullet) PingInterval() time.Duration { + return time.Duration(b.InstanceServers[0].PingInterval) * time.Millisecond +} + +func (b *Bullet) PingTimeout() time.Duration { + return time.Duration(b.InstanceServers[0].PingTimeout) * time.Millisecond +} + +func (b *Bullet) URL() (*url.URL, error) { + if len(b.InstanceServers) == 0 { + return nil, errors.New("InstanceServers is empty") + } + + u, err := url.Parse(b.InstanceServers[0].Endpoint) + if err != nil { + return nil, err + } + + params := url.Values{} + params.Add("token", b.Token) + + u.RawQuery = params.Encode() + return u, nil +} + +//go:generate requestgen -type GetPublicBulletRequest -method "POST" -url "/api/v1/bullet-public" -responseType .APIResponse -responseDataField Data -responseDataType .Bullet +type GetPublicBulletRequest struct { + client requestgen.APIClient +} + +//go:generate requestgen -type GetPrivateBulletRequest -method "POST" -url "/api/v1/bullet-private" -responseType .APIResponse -responseDataField Data -responseDataType .Bullet +type GetPrivateBulletRequest struct { + client requestgen.AuthenticatedAPIClient +} diff --git a/pkg/exchange/kucoin/kucoinapi/cancel_all_order_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/cancel_all_order_request_requestgen.go new file mode 100644 index 0000000000..d6ad722255 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/cancel_all_order_request_requestgen.go @@ -0,0 +1,143 @@ +// Code generated by "requestgen -method DELETE -responseType .APIResponse -responseDataField Data -url /api/v1/orders -type CancelAllOrderRequest -responseDataType .CancelOrderResponse"; DO NOT EDIT. 
+ +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (r *CancelAllOrderRequest) Symbol(symbol string) *CancelAllOrderRequest { + r.symbol = &symbol + return r +} + +func (r *CancelAllOrderRequest) TradeType(tradeType string) *CancelAllOrderRequest { + r.tradeType = &tradeType + return r +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (r *CancelAllOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (r *CancelAllOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check symbol field -> json key symbol + if r.symbol != nil { + symbol := *r.symbol + + // assign parameter of symbol + params["symbol"] = symbol + } else { + } + // check tradeType field -> json key tradeType + if r.tradeType != nil { + tradeType := *r.tradeType + + // assign parameter of tradeType + params["tradeType"] = tradeType + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (r *CancelAllOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := r.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (r *CancelAllOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := r.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (r *CancelAllOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (r *CancelAllOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (r *CancelAllOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := r.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (r *CancelAllOrderRequest) Do(ctx context.Context) (*CancelOrderResponse, error) { + + params, err := r.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/v1/orders" + + req, err := r.client.NewAuthenticatedRequest(ctx, "DELETE", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := r.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data CancelOrderResponse + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/cancel_order_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/cancel_order_request_requestgen.go new file mode 100644 index 
0000000000..e53a6cbe9e --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/cancel_order_request_requestgen.go @@ -0,0 +1,111 @@ +// Code generated by "requestgen -type CancelOrderRequest"; DO NOT EDIT. + +package kucoinapi + +import ( + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (r *CancelOrderRequest) OrderID(orderID string) *CancelOrderRequest { + r.orderID = &orderID + return r +} + +func (r *CancelOrderRequest) ClientOrderID(clientOrderID string) *CancelOrderRequest { + r.clientOrderID = &clientOrderID + return r +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (r *CancelOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (r *CancelOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check orderID field -> json key orderID + if r.orderID != nil { + orderID := *r.orderID + + // assign parameter of orderID + params["orderID"] = orderID + } else { + } + // check clientOrderID field -> json key clientOrderID + if r.clientOrderID != nil { + clientOrderID := *r.clientOrderID + + // assign parameter of clientOrderID + params["clientOrderID"] = clientOrderID + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (r *CancelOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := r.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (r *CancelOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := r.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (r *CancelOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (r *CancelOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (r *CancelOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := r.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/client.go b/pkg/exchange/kucoin/kucoinapi/client.go new file mode 100644 index 0000000000..6df47191c2 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/client.go @@ -0,0 +1,155 @@ +package kucoinapi + +import ( + "bytes" + "context" + "crypto/hmac" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "net/http" + "net/url" + "strconv" + "strings" + "time" + + "github.com/c9s/requestgen" + "github.com/pkg/errors" +) + +const defaultHTTPTimeout = time.Second * 15 +const RestBaseURL = "https://api.kucoin.com/api" +const SandboxRestBaseURL = "https://openapi-sandbox.kucoin.com/api" + +type RestClient struct { + 
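+ // BaseAPIClient supplies the base URL and the underlying HTTP client used to
+ // build and send requests (see NewClient below)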
requestgen.BaseAPIClient + + Key, Secret, Passphrase string + KeyVersion string + + AccountService *AccountService + MarketDataService *MarketDataService + TradeService *TradeService + BulletService *BulletService +} + +func NewClient() *RestClient { + u, err := url.Parse(RestBaseURL) + if err != nil { + panic(err) + } + + client := &RestClient{ + BaseAPIClient: requestgen.BaseAPIClient{ + BaseURL: u, + HttpClient: &http.Client{ + Timeout: defaultHTTPTimeout, + }, + }, + KeyVersion: "2", + } + + client.AccountService = &AccountService{client: client} + client.MarketDataService = &MarketDataService{client: client} + client.TradeService = &TradeService{client: client} + client.BulletService = &BulletService{client: client} + return client +} + +func (c *RestClient) Auth(key, secret, passphrase string) { + c.Key = key + // pragma: allowlist nextline secret + c.Secret = secret + c.Passphrase = passphrase +} + +// newAuthenticatedRequest creates new http request for authenticated routes. +func (c *RestClient) NewAuthenticatedRequest(ctx context.Context, method, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + if len(c.Key) == 0 { + return nil, errors.New("empty api key") + } + + if len(c.Secret) == 0 { + return nil, errors.New("empty api secret") + } + + rel, err := url.Parse(refURL) + if err != nil { + return nil, err + } + + if params != nil { + rel.RawQuery = params.Encode() + } + + pathURL := c.BaseURL.ResolveReference(rel) + path := pathURL.Path + if rel.RawQuery != "" { + path += "?" + rel.RawQuery + } + + body, err := castPayload(payload) + if err != nil { + return nil, err + } + + req, err := http.NewRequestWithContext(ctx, method, pathURL.String(), bytes.NewReader(body)) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Accept", "application/json") + + // Build authentication headers + c.attachAuthHeaders(req, method, path, body) + return req, nil +} + +func (c *RestClient) attachAuthHeaders(req *http.Request, method string, path string, body []byte) { + // Set location to UTC so that it outputs "2020-12-08T09:08:57.715Z" + t := time.Now().In(time.UTC) + // timestamp := t.Format("2006-01-02T15:04:05.999Z07:00") + timestamp := strconv.FormatInt(t.UnixNano()/int64(time.Millisecond), 10) + signKey := timestamp + strings.ToUpper(method) + path + string(body) + signature := sign(c.Secret, signKey) + + req.Header.Add("KC-API-KEY", c.Key) + req.Header.Add("KC-API-SIGN", signature) + req.Header.Add("KC-API-TIMESTAMP", timestamp) + req.Header.Add("KC-API-PASSPHRASE", sign(c.Secret, c.Passphrase)) + req.Header.Add("KC-API-KEY-VERSION", c.KeyVersion) +} + +// sign uses sha256 to sign the payload with the given secret +func sign(secret, payload string) string { + var sig = hmac.New(sha256.New, []byte(secret)) + _, err := sig.Write([]byte(payload)) + if err != nil { + return "" + } + + return base64.StdEncoding.EncodeToString(sig.Sum(nil)) +} + +func castPayload(payload interface{}) ([]byte, error) { + if payload == nil { + return nil, nil + } + + switch v := payload.(type) { + case string: + return []byte(v), nil + + case []byte: + return v, nil + + } + return json.Marshal(payload) +} + +type APIResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data json.RawMessage `json:"data"` +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_account_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_account_request_requestgen.go new file mode 100644 index 0000000000..5abda240eb --- 
/dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_account_request_requestgen.go @@ -0,0 +1,131 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -url /api/v1/accounts/:accountID -type GetAccountRequest -responseDataType .Account"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (g *GetAccountRequest) AccountID(accountID string) *GetAccountRequest { + g.accountID = accountID + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetAccountRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetAccountRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetAccountRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetAccountRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetAccountRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check accountID field -> json key accountID + accountID := g.accountID + + // assign parameter of accountID + params["accountID"] = accountID + + return params, nil +} + +func (g *GetAccountRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetAccountRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetAccountRequest) Do(ctx context.Context) (*Account, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v1/accounts/:accountID" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data Account + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_all_tickers_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_all_tickers_request_requestgen.go new file mode 100644 index 
0000000000..f633ac0ad8 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_all_tickers_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -type GetAllTickersRequest -url /api/v1/market/allTickers -responseDataType AllTickers"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetAllTickersRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetAllTickersRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetAllTickersRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetAllTickersRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetAllTickersRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetAllTickersRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetAllTickersRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetAllTickersRequest) Do(ctx context.Context) (*AllTickers, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v1/market/allTickers" + + req, err := g.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data AllTickers + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_fills_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_fills_request_requestgen.go new file mode 100644 index 0000000000..307d1dc13f --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_fills_request_requestgen.go @@ -0,0 +1,239 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -url /api/v1/fills -type GetFillsRequest -responseDataType .FillListPage"; DO NOT EDIT. 
+ +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" + "strconv" + "time" +) + +func (r *GetFillsRequest) OrderID(orderID string) *GetFillsRequest { + r.orderID = &orderID + return r +} + +func (r *GetFillsRequest) TradeType(tradeType string) *GetFillsRequest { + r.tradeType = &tradeType + return r +} + +func (r *GetFillsRequest) Symbol(symbol string) *GetFillsRequest { + r.symbol = &symbol + return r +} + +func (r *GetFillsRequest) Side(side string) *GetFillsRequest { + r.side = &side + return r +} + +func (r *GetFillsRequest) OrderType(orderType string) *GetFillsRequest { + r.orderType = &orderType + return r +} + +func (r *GetFillsRequest) StartAt(startAt time.Time) *GetFillsRequest { + r.startAt = &startAt + return r +} + +func (r *GetFillsRequest) EndAt(endAt time.Time) *GetFillsRequest { + r.endAt = &endAt + return r +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (r *GetFillsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check orderID field -> json key orderId + if r.orderID != nil { + orderID := *r.orderID + + // assign parameter of orderID + params["orderId"] = orderID + } else { + } + // check tradeType field -> json key tradeType + if r.tradeType != nil { + tradeType := *r.tradeType + + // assign parameter of tradeType + params["tradeType"] = tradeType + } else { + tradeType := "TRADE" + + // assign parameter of tradeType + params["tradeType"] = tradeType + } + // check symbol field -> json key symbol + if r.symbol != nil { + symbol := *r.symbol + + // assign parameter of symbol + params["symbol"] = symbol + } else { + } + // check side field -> json key side + if r.side != nil { + side := *r.side + + // TEMPLATE check-valid-values + switch side { + case "buy", "sell": + params["side"] = side + + default: + return nil, fmt.Errorf("side value %v is invalid", side) + + } + // END TEMPLATE check-valid-values + + // assign parameter of side + params["side"] = side + } else { + } + // check orderType field -> json key type + if r.orderType != nil { + orderType := *r.orderType + + // TEMPLATE check-valid-values + switch orderType { + case "limit", "market", "limit_stop", "market_stop": + params["type"] = orderType + + default: + return nil, fmt.Errorf("type value %v is invalid", orderType) + + } + // END TEMPLATE check-valid-values + + // assign parameter of orderType + params["type"] = orderType + } else { + } + // check startAt field -> json key startAt + if r.startAt != nil { + startAt := *r.startAt + + // assign parameter of startAt + // convert time.Time to milliseconds time stamp + params["startAt"] = strconv.FormatInt(startAt.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endAt field -> json key endAt + if r.endAt != nil { + endAt := *r.endAt + + // assign parameter of endAt + // convert time.Time to milliseconds time stamp + params["endAt"] = strconv.FormatInt(endAt.UnixNano()/int64(time.Millisecond), 10) + } else { + } + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (r *GetFillsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (r *GetFillsRequest) GetParametersQuery() (url.Values, 
error) { + query := url.Values{} + + params, err := r.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (r *GetFillsRequest) GetParametersJSON() ([]byte, error) { + params, err := r.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (r *GetFillsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (r *GetFillsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (r *GetFillsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := r.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (r *GetFillsRequest) Do(ctx context.Context) (*FillListPage, error) { + + // no body params + var params interface{} + query, err := r.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/api/v1/fills" + + req, err := r.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := r.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data FillListPage + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_k_lines_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_k_lines_request_requestgen.go new file mode 100644 index 0000000000..839f2fa8fe --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_k_lines_request_requestgen.go @@ -0,0 +1,142 @@ +// Code generated by "requestgen -type GetKLinesRequest"; DO NOT EDIT. 
+ +package kucoinapi + +import ( + "encoding/json" + "fmt" + "net/url" + "regexp" + "strconv" + "time" +) + +func (r *GetKLinesRequest) Symbol(symbol string) *GetKLinesRequest { + r.symbol = symbol + return r +} + +func (r *GetKLinesRequest) Interval(interval string) *GetKLinesRequest { + r.interval = interval + return r +} + +func (r *GetKLinesRequest) StartAt(startAt time.Time) *GetKLinesRequest { + r.startAt = &startAt + return r +} + +func (r *GetKLinesRequest) EndAt(endAt time.Time) *GetKLinesRequest { + r.endAt = &endAt + return r +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (r *GetKLinesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (r *GetKLinesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check symbol field -> json key symbol + symbol := r.symbol + + // assign parameter of symbol + params["symbol"] = symbol + // check interval field -> json key type + interval := r.interval + + switch interval { + case "1min", "3min", "5min", "15min", "30min", "1hour", "2hour", "4hour", "6hour", "8hour", "12hour", "1day", "1week": + params["type"] = interval + + default: + return params, fmt.Errorf("type value %v is invalid", interval) + + } + + // assign parameter of interval + params["type"] = interval + // check startAt field -> json key startAt + if r.startAt != nil { + startAt := *r.startAt + + // assign parameter of startAt + // convert time.Time to seconds time stamp + params["startAt"] = strconv.FormatInt(startAt.Unix(), 10) + } + // check endAt field -> json key endAt + if r.endAt != nil { + endAt := *r.endAt + + // assign parameter of endAt + // convert time.Time to seconds time stamp + params["endAt"] = strconv.FormatInt(endAt.Unix(), 10) + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (r *GetKLinesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := r.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (r *GetKLinesRequest) GetParametersJSON() ([]byte, error) { + params, err := r.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (r *GetKLinesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (r *GetKLinesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (r *GetKLinesRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := r.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} diff --git 
a/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_100_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_100_request_requestgen.go new file mode 100644 index 0000000000..7a770352f4 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_100_request_requestgen.go @@ -0,0 +1,128 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -type GetOrderBookLevel2Depth100Request -url /api/v1/market/orderbook/level2_100 -responseDataType .OrderBook"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (g *GetOrderBookLevel2Depth100Request) Symbol(symbol string) *GetOrderBookLevel2Depth100Request { + g.symbol = symbol + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetOrderBookLevel2Depth100Request) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check symbol field -> json key symbol + symbol := g.symbol + + // assign parameter of symbol + params["symbol"] = symbol + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetOrderBookLevel2Depth100Request) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetOrderBookLevel2Depth100Request) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetOrderBookLevel2Depth100Request) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetOrderBookLevel2Depth100Request) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetOrderBookLevel2Depth100Request) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetOrderBookLevel2Depth100Request) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetOrderBookLevel2Depth100Request) Do(ctx context.Context) (*OrderBook, error) { + + // no body params + var params interface{} + query, err := g.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/api/v1/market/orderbook/level2_100" + + req, err := g.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil 
{ + return nil, err + } + var data OrderBook + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_20_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_20_request_requestgen.go new file mode 100644 index 0000000000..871a893406 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_20_request_requestgen.go @@ -0,0 +1,128 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -type GetOrderBookLevel2Depth20Request -url /api/v1/market/orderbook/level2_20 -responseDataType .OrderBook"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (g *GetOrderBookLevel2Depth20Request) Symbol(symbol string) *GetOrderBookLevel2Depth20Request { + g.symbol = symbol + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetOrderBookLevel2Depth20Request) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check symbol field -> json key symbol + symbol := g.symbol + + // assign parameter of symbol + params["symbol"] = symbol + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetOrderBookLevel2Depth20Request) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetOrderBookLevel2Depth20Request) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetOrderBookLevel2Depth20Request) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetOrderBookLevel2Depth20Request) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetOrderBookLevel2Depth20Request) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetOrderBookLevel2Depth20Request) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetOrderBookLevel2Depth20Request) Do(ctx context.Context) (*OrderBook, error) { + + // no body params + var params interface{} + query, err := g.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/api/v1/market/orderbook/level2_20" + + req, err := g.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := 
g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data OrderBook + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_all_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_all_request_requestgen.go new file mode 100644 index 0000000000..b71792b964 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_order_book_level_2_depth_all_request_requestgen.go @@ -0,0 +1,128 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -type GetOrderBookLevel2DepthAllRequest -url /api/v3/market/orderbook/level2 -responseDataType .OrderBook"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (g *GetOrderBookLevel2DepthAllRequest) Symbol(symbol string) *GetOrderBookLevel2DepthAllRequest { + g.symbol = symbol + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetOrderBookLevel2DepthAllRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check symbol field -> json key symbol + symbol := g.symbol + + // assign parameter of symbol + params["symbol"] = symbol + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetOrderBookLevel2DepthAllRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetOrderBookLevel2DepthAllRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetOrderBookLevel2DepthAllRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetOrderBookLevel2DepthAllRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetOrderBookLevel2DepthAllRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetOrderBookLevel2DepthAllRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetOrderBookLevel2DepthAllRequest) Do(ctx context.Context) (*OrderBook, error) { + + // no body params + var params interface{} + query, err := g.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := 
"/api/v3/market/orderbook/level2" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data OrderBook + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_private_bullet_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_private_bullet_request_requestgen.go new file mode 100644 index 0000000000..535d46ed8f --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_private_bullet_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -type GetPrivateBulletRequest -method POST -url /api/v1/bullet-private -responseType .APIResponse -responseDataField Data -responseDataType .Bullet"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetPrivateBulletRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetPrivateBulletRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetPrivateBulletRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetPrivateBulletRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetPrivateBulletRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetPrivateBulletRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetPrivateBulletRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetPrivateBulletRequest) Do(ctx context.Context) (*Bullet, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v1/bullet-private" + + req, err := g.client.NewAuthenticatedRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := 
response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data Bullet + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_public_bullet_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/get_public_bullet_request_requestgen.go new file mode 100644 index 0000000000..c7b8aba98e --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_public_bullet_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -type GetPublicBulletRequest -method POST -url /api/v1/bullet-public -responseType .APIResponse -responseDataField Data -responseDataType .Bullet"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetPublicBulletRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetPublicBulletRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetPublicBulletRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetPublicBulletRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetPublicBulletRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetPublicBulletRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetPublicBulletRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetPublicBulletRequest) Do(ctx context.Context) (*Bullet, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v1/bullet-public" + + req, err := g.client.NewRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data Bullet + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/get_ticker_request_requestgen.go 
b/pkg/exchange/kucoin/kucoinapi/get_ticker_request_requestgen.go new file mode 100644 index 0000000000..761f725e87 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/get_ticker_request_requestgen.go @@ -0,0 +1,128 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -type GetTickerRequest -url /api/v1/market/orderbook/level1 -responseDataType Ticker"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (g *GetTickerRequest) Symbol(symbol string) *GetTickerRequest { + g.symbol = symbol + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetTickerRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + // check symbol field -> json key symbol + symbol := g.symbol + + // assign parameter of symbol + params["symbol"] = symbol + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetTickerRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetTickerRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetTickerRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetTickerRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetTickerRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetTickerRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetTickerRequest) Do(ctx context.Context) (*Ticker, error) { + + // no body params + var params interface{} + query, err := g.GetQueryParameters() + if err != nil { + return nil, err + } + + apiURL := "/api/v1/market/orderbook/level1" + + req, err := g.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data Ticker + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/list_accounts_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/list_accounts_request_requestgen.go new file mode 100644 index 
0000000000..253f186244 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/list_accounts_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -url /api/v1/accounts -type ListAccountsRequest -responseDataType []Account"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (l *ListAccountsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (l *ListAccountsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (l *ListAccountsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := l.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (l *ListAccountsRequest) GetParametersJSON() ([]byte, error) { + params, err := l.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (l *ListAccountsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (l *ListAccountsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (l *ListAccountsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := l.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (l *ListAccountsRequest) Do(ctx context.Context) ([]Account, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v1/accounts" + + req, err := l.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := l.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Account + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/list_history_orders_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/list_history_orders_request_requestgen.go new file mode 100644 index 0000000000..3653765870 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/list_history_orders_request_requestgen.go @@ -0,0 +1,161 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -url /api/v1/hist-orders -type ListHistoryOrdersRequest -responseDataType .HistoryOrderListPage"; DO NOT EDIT. 
+ +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" + "strconv" + "time" +) + +func (l *ListHistoryOrdersRequest) Symbol(symbol string) *ListHistoryOrdersRequest { + l.symbol = &symbol + return l +} + +func (l *ListHistoryOrdersRequest) StartAt(startAt time.Time) *ListHistoryOrdersRequest { + l.startAt = &startAt + return l +} + +func (l *ListHistoryOrdersRequest) EndAt(endAt time.Time) *ListHistoryOrdersRequest { + l.endAt = &endAt + return l +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (l *ListHistoryOrdersRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (l *ListHistoryOrdersRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check symbol field -> json key symbol + if l.symbol != nil { + symbol := *l.symbol + + // assign parameter of symbol + params["symbol"] = symbol + } else { + } + // check startAt field -> json key startAt + if l.startAt != nil { + startAt := *l.startAt + + // assign parameter of startAt + // convert time.Time to milliseconds time stamp + params["startAt"] = strconv.FormatInt(startAt.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endAt field -> json key endAt + if l.endAt != nil { + endAt := *l.endAt + + // assign parameter of endAt + // convert time.Time to milliseconds time stamp + params["endAt"] = strconv.FormatInt(endAt.UnixNano()/int64(time.Millisecond), 10) + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (l *ListHistoryOrdersRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := l.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (l *ListHistoryOrdersRequest) GetParametersJSON() ([]byte, error) { + params, err := l.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (l *ListHistoryOrdersRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (l *ListHistoryOrdersRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (l *ListHistoryOrdersRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := l.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (l *ListHistoryOrdersRequest) Do(ctx context.Context) (*HistoryOrderListPage, error) { + + // empty params for GET operation + var params interface{} + query, err := l.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v1/hist-orders" + + req, err := l.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, 
params) + if err != nil { + return nil, err + } + + response, err := l.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data HistoryOrderListPage + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/list_orders_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/list_orders_request_requestgen.go new file mode 100644 index 0000000000..900e220e5d --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/list_orders_request_requestgen.go @@ -0,0 +1,239 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -url /api/v1/orders -type ListOrdersRequest -responseDataType .OrderListPage"; DO NOT EDIT. + +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" + "strconv" + "time" +) + +func (r *ListOrdersRequest) Status(status string) *ListOrdersRequest { + r.status = &status + return r +} + +func (r *ListOrdersRequest) Symbol(symbol string) *ListOrdersRequest { + r.symbol = &symbol + return r +} + +func (r *ListOrdersRequest) Side(side SideType) *ListOrdersRequest { + r.side = &side + return r +} + +func (r *ListOrdersRequest) OrderType(orderType OrderType) *ListOrdersRequest { + r.orderType = &orderType + return r +} + +func (r *ListOrdersRequest) TradeType(tradeType TradeType) *ListOrdersRequest { + r.tradeType = &tradeType + return r +} + +func (r *ListOrdersRequest) StartAt(startAt time.Time) *ListOrdersRequest { + r.startAt = &startAt + return r +} + +func (r *ListOrdersRequest) EndAt(endAt time.Time) *ListOrdersRequest { + r.endAt = &endAt + return r +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (r *ListOrdersRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (r *ListOrdersRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check status field -> json key status + if r.status != nil { + status := *r.status + + // TEMPLATE check-valid-values + switch status { + case "active", "done": + params["status"] = status + + default: + return nil, fmt.Errorf("status value %v is invalid", status) + + } + // END TEMPLATE check-valid-values + + // assign parameter of status + params["status"] = status + } else { + } + // check symbol field -> json key symbol + if r.symbol != nil { + symbol := *r.symbol + + // assign parameter of symbol + params["symbol"] = symbol + } else { + } + // check side field -> json key side + if r.side != nil { + side := *r.side + + // TEMPLATE check-valid-values + switch side { + case "buy", "sell": + params["side"] = side + + default: + return nil, fmt.Errorf("side value %v is invalid", side) + + } + // END TEMPLATE check-valid-values + + // assign parameter of side + params["side"] = side + } else { + } + // check orderType field -> json key type + if r.orderType != nil { + orderType := *r.orderType + + // assign parameter of orderType + params["type"] = orderType + } else { + } + // check tradeType field -> json key tradeType + if r.tradeType != nil { + tradeType := *r.tradeType + + // assign parameter of tradeType + 
params["tradeType"] = tradeType + } else { + tradeType := "TRADE" + + // assign parameter of tradeType + params["tradeType"] = tradeType + } + // check startAt field -> json key startAt + if r.startAt != nil { + startAt := *r.startAt + + // assign parameter of startAt + // convert time.Time to milliseconds time stamp + params["startAt"] = strconv.FormatInt(startAt.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endAt field -> json key endAt + if r.endAt != nil { + endAt := *r.endAt + + // assign parameter of endAt + // convert time.Time to milliseconds time stamp + params["endAt"] = strconv.FormatInt(endAt.UnixNano()/int64(time.Millisecond), 10) + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (r *ListOrdersRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := r.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (r *ListOrdersRequest) GetParametersJSON() ([]byte, error) { + params, err := r.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (r *ListOrdersRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (r *ListOrdersRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (r *ListOrdersRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := r.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (r *ListOrdersRequest) Do(ctx context.Context) (*OrderListPage, error) { + + // empty params for GET operation + var params interface{} + query, err := r.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v1/orders" + + req, err := r.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := r.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data OrderListPage + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/list_sub_accounts_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/list_sub_accounts_request_requestgen.go new file mode 100644 index 0000000000..60494f61d9 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/list_sub_accounts_request_requestgen.go @@ -0,0 +1,115 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -url /api/v1/sub/user -type ListSubAccountsRequest -responseDataType []SubAccount"; DO NOT EDIT. 
+ +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (l *ListSubAccountsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (l *ListSubAccountsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (l *ListSubAccountsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := l.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (l *ListSubAccountsRequest) GetParametersJSON() ([]byte, error) { + params, err := l.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (l *ListSubAccountsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (l *ListSubAccountsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (l *ListSubAccountsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := l.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (l *ListSubAccountsRequest) Do(ctx context.Context) ([]SubAccount, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v1/sub/user" + + req, err := l.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := l.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []SubAccount + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/list_symbols_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/list_symbols_request_requestgen.go new file mode 100644 index 0000000000..fa4675c1a3 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/list_symbols_request_requestgen.go @@ -0,0 +1,127 @@ +// Code generated by "requestgen -method GET -responseType .APIResponse -responseDataField Data -type ListSymbolsRequest -url /api/v1/symbols -responseDataType []Symbol"; DO NOT EDIT. 
+ +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "regexp" +) + +func (l *ListSymbolsRequest) Market(market string) *ListSymbolsRequest { + l.market = &market + return l +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (l *ListSymbolsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (l *ListSymbolsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check market field -> json key market + if l.market != nil { + market := *l.market + + // assign parameter of market + params["market"] = market + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (l *ListSymbolsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := l.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (l *ListSymbolsRequest) GetParametersJSON() ([]byte, error) { + params, err := l.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (l *ListSymbolsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (l *ListSymbolsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (l *ListSymbolsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := l.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (l *ListSymbolsRequest) Do(ctx context.Context) ([]Symbol, error) { + + // empty params for GET operation + var params interface{} + query := url.Values{} + + apiURL := "/api/v1/symbols" + + req, err := l.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := l.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data []Symbol + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/marketdata.go b/pkg/exchange/kucoin/kucoinapi/marketdata.go new file mode 100644 index 0000000000..9cc54c296d --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/marketdata.go @@ -0,0 +1,361 @@ +package kucoinapi + +//go:generate -command GetRequest requestgen -method GET -responseType .APIResponse -responseDataField Data +//go:generate -command PostRequest requestgen -method POST -responseType .APIResponse -responseDataField Data + +import ( + "context" + "encoding/json" + "net/url" + "strconv" + "time" + + "github.com/c9s/requestgen" + 
"github.com/pkg/errors" + "github.com/valyala/fastjson" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type MarketDataService struct { + client *RestClient +} + +func (s *MarketDataService) NewGetKLinesRequest() *GetKLinesRequest { + return &GetKLinesRequest{client: s.client} +} + +type Symbol struct { + Symbol string `json:"symbol"` + Name string `json:"name"` + BaseCurrency string `json:"baseCurrency"` + QuoteCurrency string `json:"quoteCurrency"` + FeeCurrency string `json:"feeCurrency"` + Market string `json:"market"` + BaseMinSize fixedpoint.Value `json:"baseMinSize"` + QuoteMinSize fixedpoint.Value `json:"quoteMinSize"` + BaseIncrement fixedpoint.Value `json:"baseIncrement"` + QuoteIncrement fixedpoint.Value `json:"quoteIncrement"` + PriceIncrement fixedpoint.Value `json:"priceIncrement"` + PriceLimitRate fixedpoint.Value `json:"priceLimitRate"` + IsMarginEnabled bool `json:"isMarginEnabled"` + EnableTrading bool `json:"enableTrading"` +} + +//go:generate GetRequest -type ListSymbolsRequest -url "/api/v1/symbols" -responseDataType []Symbol +type ListSymbolsRequest struct { + client requestgen.APIClient + market *string `param:"market"` +} + +func (s *MarketDataService) NewListSymbolsRequest() *ListSymbolsRequest { + return &ListSymbolsRequest{client: s.client} +} + +func (s *MarketDataService) ListSymbols(market ...string) ([]Symbol, error) { + req := s.NewListSymbolsRequest() + if len(market) == 1 { + req.Market(market[0]) + } else if len(market) > 1 { + return nil, errors.New("symbols api only supports one market parameter") + } + + return req.Do(context.Background()) +} + +/* +//Get Ticker +{ + "sequence": "1550467636704", + "bestAsk": "0.03715004", + "size": "0.17", + "price": "0.03715005", + "bestBidSize": "3.803", + "bestBid": "0.03710768", + "bestAskSize": "1.788", + "time": 1550653727731 +} +*/ +type Ticker struct { + Sequence string `json:"sequence"` + Size fixedpoint.Value `json:"size"` + Price fixedpoint.Value `json:"price"` + BestAsk fixedpoint.Value `json:"bestAsk"` + BestBid fixedpoint.Value `json:"bestBid"` + BestBidSize fixedpoint.Value `json:"bestBidSize"` + Time types.MillisecondTimestamp `json:"time"` +} + +//go:generate GetRequest -type GetTickerRequest -url "/api/v1/market/orderbook/level1" -responseDataType Ticker +type GetTickerRequest struct { + client requestgen.APIClient + symbol string `param:"symbol,query"` +} + +func (s *MarketDataService) NewGetTickerRequest(symbol string) *GetTickerRequest { + return &GetTickerRequest{client: s.client, symbol: symbol} +} + +/* +{ + "time":1602832092060, + "ticker":[ + { + "symbol": "BTC-USDT", // symbol + "symbolName":"BTC-USDT", // SymbolName of trading pairs, it would change after renaming + "buy": "11328.9", // bestAsk + "sell": "11329", // bestBid + "changeRate": "-0.0055", // 24h change rate + "changePrice": "-63.6", // 24h change price + "high": "11610", // 24h highest price + "low": "11200", // 24h lowest price + "vol": "2282.70993217", // 24h volume,the aggregated trading volume in BTC + "volValue": "25984946.157790431", // 24h total, the trading volume in quote currency of last 24 hours + "last": "11328.9", // last price + "averagePrice": "11360.66065903", // 24h average transaction price yesterday + "takerFeeRate": "0.001", // Basic Taker Fee + "makerFeeRate": "0.001", // Basic Maker Fee + "takerCoefficient": "1", // Taker Fee Coefficient + "makerCoefficient": "1" // Maker Fee Coefficient + } + ] +} +*/ + +type Ticker24H struct { + Symbol string `json:"symbol"` + SymbolName 
string `json:"symbolName"` + Buy fixedpoint.Value `json:"buy"` + Sell fixedpoint.Value `json:"sell"` + ChangeRate fixedpoint.Value `json:"changeRate"` + ChangePrice fixedpoint.Value `json:"changePrice"` + High fixedpoint.Value `json:"high"` + Low fixedpoint.Value `json:"low"` + Last fixedpoint.Value `json:"last"` + AveragePrice fixedpoint.Value `json:"averagePrice"` + Volume fixedpoint.Value `json:"vol"` // base volume + VolumeValue fixedpoint.Value `json:"volValue"` // quote volume + + TakerFeeRate fixedpoint.Value `json:"takerFeeRate"` + MakerFeeRate fixedpoint.Value `json:"makerFeeRate"` + + TakerCoefficient fixedpoint.Value `json:"takerCoefficient"` + MakerCoefficient fixedpoint.Value `json:"makerCoefficient"` + + Time types.MillisecondTimestamp `json:"time"` +} + +type AllTickers struct { + Time types.MillisecondTimestamp `json:"time"` + Ticker []Ticker24H `json:"ticker"` +} + +//go:generate GetRequest -type GetAllTickersRequest -url "/api/v1/market/allTickers" -responseDataType AllTickers +type GetAllTickersRequest struct { + client requestgen.APIClient +} + +func (s *MarketDataService) ListTickers() (*AllTickers, error) { + req := &GetAllTickersRequest{client: s.client} + return req.Do(context.Background()) +} + +func (s *MarketDataService) GetTicker24HStat(symbol string) (*Ticker24H, error) { + var params = url.Values{} + params.Add("symbol", symbol) + + req, err := s.client.NewRequest(context.Background(), "GET", "/api/v1/market/stats", params, nil) + if err != nil { + return nil, err + } + + response, err := s.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data *Ticker24H `json:"data"` + } + + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + + return apiResponse.Data, nil +} + +/* +{ + "sequence": "3262786978", + "time": 1550653727731, + "bids": [["6500.12", "0.45054140"], + ["6500.11", "0.45054140"]], //[price,size] + "asks": [["6500.16", "0.57753524"], + ["6500.15", "0.57753524"]] +} +*/ +type OrderBook struct { + Sequence string `json:"sequence,omitempty"` + Time types.MillisecondTimestamp `json:"time"` + Bids types.PriceVolumeSlice `json:"bids,omitempty"` + Asks types.PriceVolumeSlice `json:"asks,omitempty"` +} + +//go:generate GetRequest -type GetOrderBookLevel2Depth20Request -url "/api/v1/market/orderbook/level2_20" -responseDataType .OrderBook +type GetOrderBookLevel2Depth20Request struct { + client requestgen.APIClient + symbol string `param:"symbol,query"` +} + +//go:generate GetRequest -type GetOrderBookLevel2Depth100Request -url "/api/v1/market/orderbook/level2_100" -responseDataType .OrderBook +type GetOrderBookLevel2Depth100Request struct { + client requestgen.APIClient + symbol string `param:"symbol,query"` +} + +//go:generate GetRequest -type GetOrderBookLevel2DepthAllRequest -url "/api/v3/market/orderbook/level2" -responseDataType .OrderBook +type GetOrderBookLevel2DepthAllRequest struct { + client requestgen.AuthenticatedAPIClient + symbol string `param:"symbol,query"` +} + +type OrderBookRequest interface { + Do(ctx context.Context) (*OrderBook, error) +} + +func (s *MarketDataService) NewGetOrderBookRequest(symbol string, depth int) OrderBookRequest { + switch depth { + case 20: + return &GetOrderBookLevel2Depth20Request{client: s.client, symbol: symbol} + + case 100: + return &GetOrderBookLevel2Depth100Request{client: s.client, symbol: symbol} + } + + return &GetOrderBookLevel2DepthAllRequest{client: s.client, symbol: 
symbol} +} + +func (s *MarketDataService) GetOrderBook(symbol string, depth int) (*OrderBook, error) { + req := s.NewGetOrderBookRequest(symbol, depth) + return req.Do(context.Background()) +} + +//go:generate requestgen -type GetKLinesRequest +type GetKLinesRequest struct { + client *RestClient + + symbol string `param:"symbol"` + + interval string `param:"type" validValues:"1min,3min,5min,15min,30min,1hour,2hour,4hour,6hour,8hour,12hour,1day,1week"` + + startAt *time.Time `param:"startAt,seconds"` + + endAt *time.Time `param:"endAt,seconds"` +} + +type KLine struct { + Symbol string + Interval string + StartTime time.Time + Open fixedpoint.Value + High fixedpoint.Value + Low fixedpoint.Value + Close fixedpoint.Value + Volume, QuoteVolume fixedpoint.Value +} + +func (r *GetKLinesRequest) Do(ctx context.Context) ([]KLine, error) { + params, err := r.GetParametersQuery() + if err != nil { + return nil, err + } + + req, err := r.client.NewRequest(ctx, "GET", "/api/v1/market/candles", params, nil) + if err != nil { + return nil, err + } + + response, err := r.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data json.RawMessage `json:"data"` + } + + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + + if apiResponse.Data == nil { + return nil, errors.New("api error: [" + apiResponse.Code + "] " + apiResponse.Message) + } + + return parseKLines(apiResponse.Data, r.symbol, r.interval) +} + +func parseKLines(b []byte, symbol, interval string) (klines []KLine, err error) { + s, err := fastjson.ParseBytes(b) + if err != nil { + return klines, err + } + + for _, v := range s.GetArray() { + arr := v.GetArray() + ts, err := strconv.ParseInt(string(arr[0].GetStringBytes()), 10, 64) + if err != nil { + return klines, err + } + + o, err := fixedpoint.NewFromString(string(arr[1].GetStringBytes())) + if err != nil { + return klines, err + } + + c, err := fixedpoint.NewFromString(string(arr[2].GetStringBytes())) + if err != nil { + return klines, err + } + + h, err := fixedpoint.NewFromString(string(arr[3].GetStringBytes())) + if err != nil { + return klines, err + } + + l, err := fixedpoint.NewFromString(string(arr[4].GetStringBytes())) + if err != nil { + return klines, err + } + + vv, err := fixedpoint.NewFromString(string(arr[5].GetStringBytes())) + if err != nil { + return klines, err + } + + qv, err := fixedpoint.NewFromString(string(arr[6].GetStringBytes())) + if err != nil { + return klines, err + } + + klines = append(klines, KLine{ + Symbol: symbol, + Interval: interval, + StartTime: time.Unix(ts, 0), + Open: o, + High: h, + Low: l, + Close: c, + Volume: vv, + QuoteVolume: qv, + }) + } + + return klines, err +} diff --git a/pkg/exchange/kucoin/kucoinapi/place_order_request_requestgen.go b/pkg/exchange/kucoin/kucoinapi/place_order_request_requestgen.go new file mode 100644 index 0000000000..50b7557bcd --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/place_order_request_requestgen.go @@ -0,0 +1,251 @@ +// Code generated by "requestgen -method POST -responseType .APIResponse -responseDataField Data -url /api/v1/orders -type PlaceOrderRequest -responseDataType .OrderResponse"; DO NOT EDIT. 
+ +package kucoinapi + +import ( + "context" + "encoding/json" + "fmt" + "github.com/google/uuid" + "net/url" + "regexp" +) + +func (r *PlaceOrderRequest) ClientOrderID(clientOrderID string) *PlaceOrderRequest { + r.clientOrderID = &clientOrderID + return r +} + +func (r *PlaceOrderRequest) Symbol(symbol string) *PlaceOrderRequest { + r.symbol = symbol + return r +} + +func (r *PlaceOrderRequest) Tag(tag string) *PlaceOrderRequest { + r.tag = &tag + return r +} + +func (r *PlaceOrderRequest) Side(side SideType) *PlaceOrderRequest { + r.side = side + return r +} + +func (r *PlaceOrderRequest) OrderType(orderType OrderType) *PlaceOrderRequest { + r.orderType = orderType + return r +} + +func (r *PlaceOrderRequest) Size(size string) *PlaceOrderRequest { + r.size = size + return r +} + +func (r *PlaceOrderRequest) Price(price string) *PlaceOrderRequest { + r.price = &price + return r +} + +func (r *PlaceOrderRequest) TimeInForce(timeInForce TimeInForceType) *PlaceOrderRequest { + r.timeInForce = &timeInForce + return r +} + +func (r *PlaceOrderRequest) PostOnly(postOnly bool) *PlaceOrderRequest { + r.postOnly = &postOnly + return r +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (r *PlaceOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (r *PlaceOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check clientOrderID field -> json key clientOid + if r.clientOrderID != nil { + clientOrderID := *r.clientOrderID + + // TEMPLATE check-required + if len(clientOrderID) == 0 { + return nil, fmt.Errorf("clientOid is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of clientOrderID + params["clientOid"] = clientOrderID + } else { + // assign default of clientOrderID + clientOrderID := uuid.New().String() + // assign parameter of clientOrderID + params["clientOid"] = clientOrderID + } + // check symbol field -> json key symbol + symbol := r.symbol + + // TEMPLATE check-required + if len(symbol) == 0 { + return nil, fmt.Errorf("symbol is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of symbol + params["symbol"] = symbol + // check tag field -> json key tag + if r.tag != nil { + tag := *r.tag + + // assign parameter of tag + params["tag"] = tag + } else { + } + // check side field -> json key side + side := r.side + + // assign parameter of side + params["side"] = side + // check orderType field -> json key ordType + orderType := r.orderType + + // assign parameter of orderType + params["ordType"] = orderType + // check size field -> json key size + size := r.size + + // TEMPLATE check-required + if len(size) == 0 { + return nil, fmt.Errorf("size is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of size + params["size"] = size + // check price field -> json key price + if r.price != nil { + price := *r.price + + // assign parameter of price + params["price"] = price + } else { + } + // check timeInForce field -> json key timeInForce + if r.timeInForce != nil { + timeInForce := *r.timeInForce + + // TEMPLATE check-required + if len(timeInForce) == 0 { + return nil, fmt.Errorf("timeInForce is required, empty 
string given") + } + // END TEMPLATE check-required + + // assign parameter of timeInForce + params["timeInForce"] = timeInForce + } else { + } + // check postOnly field -> json key postOnly + if r.postOnly != nil { + postOnly := *r.postOnly + + // assign parameter of postOnly + params["postOnly"] = postOnly + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (r *PlaceOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := r.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (r *PlaceOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := r.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (r *PlaceOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (r *PlaceOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (r *PlaceOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := r.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (r *PlaceOrderRequest) Do(ctx context.Context) (*OrderResponse, error) { + + params, err := r.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/v1/orders" + + req, err := r.client.NewAuthenticatedRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := r.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse APIResponse + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + var data OrderResponse + if err := json.Unmarshal(apiResponse.Data, &data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/trade.go b/pkg/exchange/kucoin/kucoinapi/trade.go new file mode 100644 index 0000000000..785a3119ab --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/trade.go @@ -0,0 +1,337 @@ +package kucoinapi + +//go:generate -command GetRequest requestgen -method GET -responseType .APIResponse -responseDataField Data +//go:generate -command PostRequest requestgen -method POST -responseType .APIResponse -responseDataField Data +//go:generate -command DeleteRequest requestgen -method DELETE -responseType .APIResponse -responseDataField Data + +import ( + "context" + "time" + + "github.com/c9s/requestgen" + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type TradeService struct { + client *RestClient +} + +type OrderResponse struct { + OrderID string `json:"orderId"` +} + +func (c *TradeService) NewListHistoryOrdersRequest() *ListHistoryOrdersRequest { + return &ListHistoryOrdersRequest{client: c.client} + +} + +func (c *TradeService) NewPlaceOrderRequest() *PlaceOrderRequest { + return &PlaceOrderRequest{client: c.client} +} + 
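// ---------------------------------------------------------------------------
// Editor's note: the two functions below are an illustrative usage sketch for
// the request builders in this package, not part of the KuCoin client added by
// this diff. They assume a *RestClient has already been constructed with valid
// API credentials; the symbol, price, and size values are placeholders.
// ---------------------------------------------------------------------------

// placeLimitOrderExample submits a post-only GTC limit buy order via the
// generated PlaceOrderRequest builder and returns the exchange order ID
// wrapped in OrderResponse.
func placeLimitOrderExample(ctx context.Context, client *RestClient) (*OrderResponse, error) {
	svc := &TradeService{client: client}
	return svc.NewPlaceOrderRequest().
		Symbol("BTC-USDT").
		Side(SideTypeBuy).
		OrderType(OrderTypeLimit).
		Price("30000").
		Size("0.001").
		TimeInForce(TimeInForceGTC).
		PostOnly(true).
		Do(ctx)
}

// listActiveOrdersExample queries the open orders of the last 24 hours for a
// single symbol; tradeType falls back to the default "TRADE" (spot) when not set.
func listActiveOrdersExample(ctx context.Context, client *RestClient, symbol string) (*OrderListPage, error) {
	svc := &TradeService{client: client}
	return svc.NewListOrdersRequest().
		Symbol(symbol).
		Status("active").
		StartAt(time.Now().Add(-24 * time.Hour)).
		Do(ctx)
}
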
+func (c *TradeService) NewBatchPlaceOrderRequest() *BatchPlaceOrderRequest { + return &BatchPlaceOrderRequest{client: c.client} +} + +func (c *TradeService) NewCancelOrderRequest() *CancelOrderRequest { + return &CancelOrderRequest{client: c.client} +} + +func (c *TradeService) NewCancelAllOrderRequest() *CancelAllOrderRequest { + return &CancelAllOrderRequest{client: c.client} +} + +func (c *TradeService) NewGetFillsRequest() *GetFillsRequest { + return &GetFillsRequest{client: c.client} +} + +//go:generate GetRequest -url /api/v1/fills -type GetFillsRequest -responseDataType .FillListPage +type GetFillsRequest struct { + client requestgen.AuthenticatedAPIClient + + orderID *string `param:"orderId"` + + tradeType *string `param:"tradeType" default:"TRADE"` + + symbol *string `param:"symbol"` + + side *string `param:"side" validValues:"buy,sell"` + + orderType *string `param:"type" validValues:"limit,market,limit_stop,market_stop"` + + startAt *time.Time `param:"startAt,milliseconds"` + + endAt *time.Time `param:"endAt,milliseconds"` +} + +type FillListPage struct { + CurrentPage int `json:"currentPage"` + PageSize int `json:"pageSize"` + TotalNumber int `json:"totalNum"` + TotalPage int `json:"totalPage"` + Items []Fill `json:"items"` +} + +type Fill struct { + Symbol string `json:"symbol"` + TradeId string `json:"tradeId"` + OrderId string `json:"orderId"` + CounterOrderId string `json:"counterOrderId"` + Side SideType `json:"side"` + Liquidity LiquidityType `json:"liquidity"` + ForceTaker bool `json:"forceTaker"` + Price fixedpoint.Value `json:"price"` + Size fixedpoint.Value `json:"size"` + Funds fixedpoint.Value `json:"funds"` + Fee fixedpoint.Value `json:"fee"` + FeeRate fixedpoint.Value `json:"feeRate"` + FeeCurrency string `json:"feeCurrency"` + Stop string `json:"stop"` + Type OrderType `json:"type"` + CreatedAt types.MillisecondTimestamp `json:"createdAt"` + TradeType TradeType `json:"tradeType"` +} + +//go:generate GetRequest -url /api/v1/hist-orders -type ListHistoryOrdersRequest -responseDataType .HistoryOrderListPage +type ListHistoryOrdersRequest struct { + client requestgen.AuthenticatedAPIClient + + symbol *string `param:"symbol"` + + startAt *time.Time `param:"startAt,milliseconds"` + + endAt *time.Time `param:"endAt,milliseconds"` +} + +type HistoryOrder struct { + Symbol string `json:"symbol"` + DealPrice string `json:"dealPrice"` + DealValue string `json:"dealValue"` + Amount string `json:"amount"` + Fee string `json:"fee"` + Side string `json:"side"` + CreatedAt int `json:"createdAt"` +} + +type HistoryOrderListPage struct { + CurrentPage int `json:"currentPage"` + PageSize int `json:"pageSize"` + TotalNum int `json:"totalNum"` + TotalPage int `json:"totalPage"` + Items []HistoryOrder `json:"items"` +} + +//go:generate GetRequest -url /api/v1/orders -type ListOrdersRequest -responseDataType .OrderListPage +type ListOrdersRequest struct { + client requestgen.AuthenticatedAPIClient + + status *string `param:"status" validValues:"active,done"` + + symbol *string `param:"symbol"` + + side *SideType `param:"side" validValues:"buy,sell"` + + orderType *OrderType `param:"type"` + + tradeType *TradeType `param:"tradeType" default:"TRADE"` + + startAt *time.Time `param:"startAt,milliseconds"` + + endAt *time.Time `param:"endAt,milliseconds"` +} + +type Order struct { + ID string `json:"id"` + Symbol string `json:"symbol"` + OperationType string `json:"opType"` + Type string `json:"type"` + Side string `json:"side"` + Price fixedpoint.Value `json:"price"` + Size 
fixedpoint.Value `json:"size"` + Funds fixedpoint.Value `json:"funds"` + DealFunds fixedpoint.Value `json:"dealFunds"` + DealSize fixedpoint.Value `json:"dealSize"` + Fee fixedpoint.Value `json:"fee"` + FeeCurrency string `json:"feeCurrency"` + StopType string `json:"stop"` + StopTriggerred bool `json:"stopTriggered"` + StopPrice fixedpoint.Value `json:"stopPrice"` + TimeInForce TimeInForceType `json:"timeInForce"` + PostOnly bool `json:"postOnly"` + Hidden bool `json:"hidden"` + Iceberg bool `json:"iceberg"` + Channel string `json:"channel"` + ClientOrderID string `json:"clientOid"` + Remark string `json:"remark"` + IsActive bool `json:"isActive"` + CancelExist bool `json:"cancelExist"` + CreatedAt types.MillisecondTimestamp `json:"createdAt"` +} + +type OrderListPage struct { + CurrentPage int `json:"currentPage"` + PageSize int `json:"pageSize"` + TotalNumber int `json:"totalNum"` + TotalPage int `json:"totalPage"` + Items []Order `json:"items"` +} + +func (c *TradeService) NewListOrdersRequest() *ListOrdersRequest { + return &ListOrdersRequest{client: c.client} +} + +//go:generate PostRequest -url /api/v1/orders -type PlaceOrderRequest -responseDataType .OrderResponse +type PlaceOrderRequest struct { + client requestgen.AuthenticatedAPIClient + + // A combination of case-sensitive alphanumerics, all numbers, or all letters of up to 32 characters. + clientOrderID *string `param:"clientOid,required" defaultValuer:"uuid()"` + + symbol string `param:"symbol,required"` + + // A combination of case-sensitive alphanumerics, all numbers, or all letters of up to 8 characters. + tag *string `param:"tag"` + + // "buy" or "sell" + side SideType `param:"side"` + + orderType OrderType `param:"ordType"` + + // limit order parameters + size string `param:"size,required"` + + price *string `param:"price"` + + timeInForce *TimeInForceType `param:"timeInForce,required"` + + postOnly *bool `param:"postOnly"` +} + +type CancelOrderResponse struct { + CancelledOrderIDs []string `json:"cancelledOrderIds,omitempty"` + + // used when using client order id for canceling order + CancelledOrderId string `json:"cancelledOrderId,omitempty"` + ClientOrderID string `json:"clientOid,omitempty"` +} + +//go:generate requestgen -type CancelOrderRequest +type CancelOrderRequest struct { + client requestgen.AuthenticatedAPIClient + + orderID *string `param:"orderID"` + clientOrderID *string `param:"clientOrderID"` +} + +func (r *CancelOrderRequest) Do(ctx context.Context) (*CancelOrderResponse, error) { + if r.orderID == nil && r.clientOrderID == nil { + return nil, errors.New("either orderID or clientOrderID is required for canceling order") + } + + var refURL string + + if r.orderID != nil { + refURL = "/api/v1/orders/" + *r.orderID + } else if r.clientOrderID != nil { + refURL = "/api/v1/order/client-order/" + *r.clientOrderID + } + + req, err := r.client.NewAuthenticatedRequest(ctx, "DELETE", refURL, nil, nil) + if err != nil { + return nil, err + } + + response, err := r.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data *CancelOrderResponse `json:"data"` + } + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + + if apiResponse.Data == nil { + return nil, errors.New("api error: [" + apiResponse.Code + "] " + apiResponse.Message) + } + + return apiResponse.Data, nil +} + +//go:generate DeleteRequest -url /api/v1/orders -type CancelAllOrderRequest -responseDataType 
.CancelOrderResponse +type CancelAllOrderRequest struct { + client requestgen.AuthenticatedAPIClient + + symbol *string `param:"symbol"` + tradeType *string `param:"tradeType"` +} + +// Request via this endpoint to place 5 orders at the same time. +// The order type must be a limit order of the same symbol. +// The interface currently only supports spot trading +type BatchPlaceOrderRequest struct { + client *RestClient + + symbol string + reqs []*PlaceOrderRequest +} + +func (r *BatchPlaceOrderRequest) Symbol(symbol string) *BatchPlaceOrderRequest { + r.symbol = symbol + return r +} + +func (r *BatchPlaceOrderRequest) Add(reqs ...*PlaceOrderRequest) *BatchPlaceOrderRequest { + r.reqs = append(r.reqs, reqs...) + return r +} + +func (r *BatchPlaceOrderRequest) Do(ctx context.Context) ([]OrderResponse, error) { + var orderList []map[string]interface{} + for _, req := range r.reqs { + params, err := req.GetParameters() + if err != nil { + return nil, err + } + + orderList = append(orderList, params) + } + + var payload = map[string]interface{}{ + "symbol": r.symbol, + "orderList": orderList, + } + + req, err := r.client.NewAuthenticatedRequest(ctx, "POST", "/api/v1/orders/multi", nil, payload) + if err != nil { + return nil, err + } + + response, err := r.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []OrderResponse `json:"data"` + } + + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + + if apiResponse.Data == nil { + return nil, errors.New("api error: [" + apiResponse.Code + "] " + apiResponse.Message) + } + + return apiResponse.Data, nil +} diff --git a/pkg/exchange/kucoin/kucoinapi/types.go b/pkg/exchange/kucoin/kucoinapi/types.go new file mode 100644 index 0000000000..90294be3d0 --- /dev/null +++ b/pkg/exchange/kucoin/kucoinapi/types.go @@ -0,0 +1,64 @@ +package kucoinapi + +type AccountType string + +const ( + AccountTypeMain AccountType = "main" + AccountTypeTrade AccountType = "trade" + AccountTypeMargin AccountType = "margin" + AccountTypePool AccountType = "pool" +) + +type TradeType string + +const ( + TradeTypeSpot TradeType = "TRADE" + TradeTypeMargin TradeType = "MARGIN" +) + +type SideType string + +const ( + SideTypeBuy SideType = "buy" + SideTypeSell SideType = "sell" +) + +type TimeInForceType string + +const ( + // GTC Good Till Canceled orders remain open on the book until canceled. This is the default behavior if no policy is specified. + TimeInForceGTC TimeInForceType = "GTC" + + // GTT Good Till Time orders remain open on the book until canceled or the allotted cancelAfter is depleted on the matching engine. GTT orders are guaranteed to cancel before any other order is processed after the cancelAfter seconds placed in order book. + TimeInForceGTT TimeInForceType = "GTT" + + // FOK Fill Or Kill orders are rejected if the entire size cannot be matched. + TimeInForceFOK TimeInForceType = "FOK" + + // IOC Immediate Or Cancel orders instantly cancel the remaining size of the limit order instead of opening it on the book. 
+ TimeInForceIOC TimeInForceType = "IOC" +) + +type LiquidityType string + +const ( + LiquidityTypeMaker LiquidityType = "maker" + LiquidityTypeTaker LiquidityType = "taker" +) + +type OrderType string + +const ( + OrderTypeMarket OrderType = "market" + OrderTypeLimit OrderType = "limit" + OrderTypeStopLimit OrderType = "stop_limit" +) + +type OrderState string + +const ( + OrderStateCanceled OrderState = "canceled" + OrderStateLive OrderState = "live" + OrderStatePartiallyFilled OrderState = "partially_filled" + OrderStateFilled OrderState = "filled" +) diff --git a/pkg/exchange/kucoin/parse.go b/pkg/exchange/kucoin/parse.go new file mode 100644 index 0000000000..2a51b39b3a --- /dev/null +++ b/pkg/exchange/kucoin/parse.go @@ -0,0 +1,104 @@ +package kucoin + +import ( + "encoding/json" + "strings" + + "github.com/c9s/bbgo/pkg/types" +) + +func parseWebSocketEvent(in []byte) (interface{}, error) { + var resp WebSocketEvent + var err = json.Unmarshal(in, &resp) + if err != nil { + return nil, err + } + + switch resp.Type { + case WebSocketMessageTypeAck: + return &resp, nil + + case WebSocketMessageTypeError: + resp.Object = string(resp.Data) + return &resp, nil + + case WebSocketMessageTypeMessage: + switch resp.Subject { + case WebSocketSubjectOrderChange: + var o WebSocketPrivateOrderEvent + if err := json.Unmarshal(resp.Data, &o); err != nil { + return &resp, err + } + resp.Object = &o + + case WebSocketSubjectAccountBalance: + var o WebSocketAccountBalanceEvent + if err := json.Unmarshal(resp.Data, &o); err != nil { + return &resp, err + } + resp.Object = &o + + case WebSocketSubjectTradeCandlesUpdate, WebSocketSubjectTradeCandlesAdd: + var o WebSocketCandleEvent + if err := json.Unmarshal(resp.Data, &o); err != nil { + return &resp, err + } + + o.Interval = extractIntervalFromTopic(resp.Topic) + o.Add = resp.Subject == WebSocketSubjectTradeCandlesAdd + resp.Object = &o + + case WebSocketSubjectTradeL2Update: + var o WebSocketOrderBookL2Event + if err := json.Unmarshal(resp.Data, &o); err != nil { + return &resp, err + } + resp.Object = &o + + case WebSocketSubjectTradeTicker: + var o WebSocketTickerEvent + if err := json.Unmarshal(resp.Data, &o); err != nil { + return &resp, err + } + resp.Object = &o + + default: + // return nil, fmt.Errorf("kucoin: unsupported subject: %s", resp.Subject) + + } + } + + return &resp, nil +} + +func extractIntervalFromTopic(topic string) types.Interval { + ta := strings.Split(topic, ":") + tb := strings.Split(ta[1], "_") + interval := tb[1] + return toGlobalInterval(interval) +} + +func toGlobalInterval(a string) types.Interval { + switch a { + case "1min": + return types.Interval1m + case "5min": + return types.Interval5m + case "15min": + return types.Interval15m + case "30min": + return types.Interval30m + case "1hour": + return types.Interval1h + case "2hour": + return types.Interval2h + case "4hour": + return types.Interval4h + case "6hour": + return types.Interval6h + case "12hour": + return types.Interval12h + + } + return "" +} diff --git a/pkg/exchange/kucoin/stream.go b/pkg/exchange/kucoin/stream.go new file mode 100644 index 0000000000..f6ad67f973 --- /dev/null +++ b/pkg/exchange/kucoin/stream.go @@ -0,0 +1,313 @@ +package kucoin + +import ( + "context" + "time" + + "github.com/gorilla/websocket" + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/depth" + "github.com/c9s/bbgo/pkg/exchange/kucoin/kucoinapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" +) + +const readTimeout = 30 
* time.Second + +//go:generate callbackgen -type Stream -interface +type Stream struct { + types.StandardStream + + client *kucoinapi.RestClient + exchange *Exchange + + bullet *kucoinapi.Bullet + candleEventCallbacks []func(candle *WebSocketCandleEvent, e *WebSocketEvent) + orderBookL2EventCallbacks []func(e *WebSocketOrderBookL2Event) + tickerEventCallbacks []func(e *WebSocketTickerEvent) + accountBalanceEventCallbacks []func(e *WebSocketAccountBalanceEvent) + privateOrderEventCallbacks []func(e *WebSocketPrivateOrderEvent) + + lastCandle map[string]types.KLine + depthBuffers map[string]*depth.Buffer +} + +func NewStream(client *kucoinapi.RestClient, ex *Exchange) *Stream { + stream := &Stream{ + StandardStream: types.NewStandardStream(), + client: client, + exchange: ex, + lastCandle: make(map[string]types.KLine), + depthBuffers: make(map[string]*depth.Buffer), + } + + stream.SetParser(parseWebSocketEvent) + stream.SetDispatcher(stream.dispatchEvent) + stream.SetEndpointCreator(stream.getEndpoint) + + stream.OnConnect(stream.handleConnect) + stream.OnCandleEvent(stream.handleCandleEvent) + stream.OnOrderBookL2Event(stream.handleOrderBookL2Event) + stream.OnTickerEvent(stream.handleTickerEvent) + stream.OnPrivateOrderEvent(stream.handlePrivateOrderEvent) + stream.OnAccountBalanceEvent(stream.handleAccountBalanceEvent) + return stream +} + +func (s *Stream) handleCandleEvent(candle *WebSocketCandleEvent, e *WebSocketEvent) { + kline := candle.KLine() + last, ok := s.lastCandle[e.Topic] + if ok && kline.StartTime.After(last.StartTime.Time()) || e.Subject == WebSocketSubjectTradeCandlesAdd { + last.Closed = true + s.EmitKLineClosed(last) + } + + s.EmitKLine(kline) + s.lastCandle[e.Topic] = kline +} + +func (s *Stream) handleOrderBookL2Event(e *WebSocketOrderBookL2Event) { + f, ok := s.depthBuffers[e.Symbol] + if ok { + f.AddUpdate(types.SliceOrderBook{ + Symbol: toGlobalSymbol(e.Symbol), + Bids: e.Changes.Bids, + Asks: e.Changes.Asks, + }, e.SequenceStart, e.SequenceEnd) + } else { + f = depth.NewBuffer(func() (types.SliceOrderBook, int64, error) { + return s.exchange.QueryDepth(context.Background(), e.Symbol) + }) + s.depthBuffers[e.Symbol] = f + f.SetBufferingPeriod(time.Second) + f.OnReady(func(snapshot types.SliceOrderBook, updates []depth.Update) { + if valid, err := snapshot.IsValid(); !valid { + log.Errorf("depth snapshot is invalid, error: %v", err) + return + } + + s.EmitBookSnapshot(snapshot) + for _, u := range updates { + s.EmitBookUpdate(u.Object) + } + }) + f.OnPush(func(update depth.Update) { + s.EmitBookUpdate(update.Object) + }) + } +} + +func (s *Stream) handleTickerEvent(e *WebSocketTickerEvent) {} + +func (s *Stream) handleAccountBalanceEvent(e *WebSocketAccountBalanceEvent) { + bm := types.BalanceMap{} + bm[e.Currency] = types.Balance{ + Currency: e.Currency, + Available: e.Available, + Locked: e.Hold, + } + s.StandardStream.EmitBalanceUpdate(bm) +} + +func (s *Stream) handlePrivateOrderEvent(e *WebSocketPrivateOrderEvent) { + if e.Type == "match" { + s.StandardStream.EmitTradeUpdate(types.Trade{ + OrderID: hashStringID(e.OrderId), + ID: hashStringID(e.TradeId), + Exchange: types.ExchangeKucoin, + Price: e.MatchPrice, + Quantity: e.MatchSize, + QuoteQuantity: e.MatchPrice.Mul(e.MatchSize), + Symbol: toGlobalSymbol(e.Symbol), + Side: toGlobalSide(e.Side), + IsBuyer: e.Side == "buy", + IsMaker: e.Liquidity == "maker", + Time: types.Time(e.Ts.Time()), + Fee: fixedpoint.Zero, // not supported + FeeCurrency: "", // not supported + }) + } + + switch e.Type { + case "open", 
"match", "filled", "canceled": + status := types.OrderStatusNew + if e.Status == "done" { + if e.FilledSize == e.Size { + status = types.OrderStatusFilled + } else { + status = types.OrderStatusCanceled + } + } else if e.Status == "open" { + if e.FilledSize.Sign() > 0 { + status = types.OrderStatusPartiallyFilled + } + } + + s.StandardStream.EmitOrderUpdate(types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: e.ClientOid, + Symbol: toGlobalSymbol(e.Symbol), + Side: toGlobalSide(e.Side), + Type: toGlobalOrderType(e.OrderType), + Quantity: e.Size, + Price: e.Price, + }, + Exchange: types.ExchangeKucoin, + OrderID: hashStringID(e.OrderId), + UUID: e.OrderId, + Status: status, + ExecutedQuantity: e.FilledSize, + IsWorking: e.Status == "open", + CreationTime: types.Time(e.OrderTime.Time()), + UpdateTime: types.Time(e.Ts.Time()), + }) + + default: + log.Warnf("unhandled private order type: %s, payload: %+v", e.Type, e) + + } +} + +func (s *Stream) handleConnect() { + if s.PublicOnly { + if err := s.sendSubscriptions(); err != nil { + log.WithError(err).Errorf("subscription error") + return + } + } else { + id := time.Now().UnixNano() / int64(time.Millisecond) + cmds := []WebSocketCommand{ + { + Id: id, + Type: WebSocketMessageTypeSubscribe, + Topic: "/spotMarket/tradeOrders", + PrivateChannel: true, + Response: true, + }, + { + Id: id + 1, + Type: WebSocketMessageTypeSubscribe, + Topic: "/account/balance", + PrivateChannel: true, + Response: true, + }, + } + for _, cmd := range cmds { + if err := s.Conn.WriteJSON(cmd); err != nil { + log.WithError(err).Errorf("private subscribe write error, cmd: %+v", cmd) + } + } + } +} + +func (s *Stream) sendSubscriptions() error { + cmds, err := convertSubscriptions(s.Subscriptions) + if err != nil { + return errors.Wrapf(err, "subscription convert error, subscriptions: %+v", s.Subscriptions) + } + + for _, cmd := range cmds { + if err := s.Conn.WriteJSON(cmd); err != nil { + return errors.Wrapf(err, "subscribe write error, cmd: %+v", cmd) + } + } + + return nil +} + +// getEndpoint use the PublicOnly flag to check whether we should allocate a public bullet or private bullet +func (s *Stream) getEndpoint(ctx context.Context) (string, error) { + var bullet *kucoinapi.Bullet + var err error + if s.PublicOnly { + bullet, err = s.client.BulletService.NewGetPublicBulletRequest().Do(ctx) + } else { + bullet, err = s.client.BulletService.NewGetPrivateBulletRequest().Do(ctx) + } + + if err != nil { + return "", err + } + + url, err := bullet.URL() + if err != nil { + return "", err + } + + s.bullet = bullet + + log.Debugf("bullet: %+v", bullet) + return url.String(), nil +} + +func (s *Stream) dispatchEvent(event interface{}) { + e, ok := event.(*WebSocketEvent) + if !ok { + return + } + + if e.Object == nil { + return + } + + switch et := e.Object.(type) { + + case *WebSocketTickerEvent: + s.EmitTickerEvent(et) + + case *WebSocketOrderBookL2Event: + s.EmitOrderBookL2Event(et) + + case *WebSocketCandleEvent: + s.EmitCandleEvent(et, e) + + case *WebSocketAccountBalanceEvent: + s.EmitAccountBalanceEvent(et) + + case *WebSocketPrivateOrderEvent: + s.EmitPrivateOrderEvent(et) + + default: + log.Warnf("unhandled event: %+v", et) + + } +} + +type WebSocketConnector interface { + Conn() *websocket.Conn + Reconnect() +} + +func ping(ctx context.Context, w WebSocketConnector, interval time.Duration) { + log.Infof("starting websocket ping worker with interval %s", interval) + + pingTicker := time.NewTicker(interval) + defer pingTicker.Stop() + + for { + select { + 
+ case <-ctx.Done(): + log.Debug("ping worker stopped") + return + + case <-pingTicker.C: + conn := w.Conn() + + if err := conn.WriteJSON(WebSocketCommand{ + Id: util.UnixMilli(), + Type: "ping", + }); err != nil { + log.WithError(err).Error("websocket ping error", err) + w.Reconnect() + } + + if err := conn.WriteControl(websocket.PingMessage, nil, time.Now().Add(3*time.Second)); err != nil { + log.WithError(err).Error("ping error", err) + w.Reconnect() + } + } + } +} diff --git a/pkg/exchange/kucoin/stream_callbacks.go b/pkg/exchange/kucoin/stream_callbacks.go new file mode 100644 index 0000000000..944d159482 --- /dev/null +++ b/pkg/exchange/kucoin/stream_callbacks.go @@ -0,0 +1,65 @@ +// Code generated by "callbackgen -type Stream -interface"; DO NOT EDIT. + +package kucoin + +func (s *Stream) OnCandleEvent(cb func(candle *WebSocketCandleEvent, e *WebSocketEvent)) { + s.candleEventCallbacks = append(s.candleEventCallbacks, cb) +} + +func (s *Stream) EmitCandleEvent(candle *WebSocketCandleEvent, e *WebSocketEvent) { + for _, cb := range s.candleEventCallbacks { + cb(candle, e) + } +} + +func (s *Stream) OnOrderBookL2Event(cb func(e *WebSocketOrderBookL2Event)) { + s.orderBookL2EventCallbacks = append(s.orderBookL2EventCallbacks, cb) +} + +func (s *Stream) EmitOrderBookL2Event(e *WebSocketOrderBookL2Event) { + for _, cb := range s.orderBookL2EventCallbacks { + cb(e) + } +} + +func (s *Stream) OnTickerEvent(cb func(e *WebSocketTickerEvent)) { + s.tickerEventCallbacks = append(s.tickerEventCallbacks, cb) +} + +func (s *Stream) EmitTickerEvent(e *WebSocketTickerEvent) { + for _, cb := range s.tickerEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnAccountBalanceEvent(cb func(e *WebSocketAccountBalanceEvent)) { + s.accountBalanceEventCallbacks = append(s.accountBalanceEventCallbacks, cb) +} + +func (s *Stream) EmitAccountBalanceEvent(e *WebSocketAccountBalanceEvent) { + for _, cb := range s.accountBalanceEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnPrivateOrderEvent(cb func(e *WebSocketPrivateOrderEvent)) { + s.privateOrderEventCallbacks = append(s.privateOrderEventCallbacks, cb) +} + +func (s *Stream) EmitPrivateOrderEvent(e *WebSocketPrivateOrderEvent) { + for _, cb := range s.privateOrderEventCallbacks { + cb(e) + } +} + +type StreamEventHub interface { + OnCandleEvent(cb func(candle *WebSocketCandleEvent, e *WebSocketEvent)) + + OnOrderBookL2Event(cb func(e *WebSocketOrderBookL2Event)) + + OnTickerEvent(cb func(e *WebSocketTickerEvent)) + + OnAccountBalanceEvent(cb func(e *WebSocketAccountBalanceEvent)) + + OnPrivateOrderEvent(cb func(e *WebSocketPrivateOrderEvent)) +} diff --git a/pkg/exchange/kucoin/symbols.go b/pkg/exchange/kucoin/symbols.go new file mode 100644 index 0000000000..ddd85b7fb7 --- /dev/null +++ b/pkg/exchange/kucoin/symbols.go @@ -0,0 +1,1117 @@ +// Code generated by go generate; DO NOT EDIT. 
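+//
+// symbolMap maps the global concatenated symbol (e.g. "BTCUSDT") to KuCoin's
+// dash-separated local symbol (e.g. "BTC-USDT"); toLocalSymbol at the bottom
+// of this file falls back to returning the input unchanged when a symbol has
+// no mapping.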
+package kucoin + +var symbolMap = map[string]string{ + "1EARTHUSDT": "1EARTH-USDT", + "1INCHUSDT": "1INCH-USDT", + "2CRZBTC": "2CRZ-BTC", + "2CRZUSDT": "2CRZ-USDT", + "AAVE3LUSDT": "AAVE3L-USDT", + "AAVE3SUSDT": "AAVE3S-USDT", + "AAVEBTC": "AAVE-BTC", + "AAVEKCS": "AAVE-KCS", + "AAVEUSDT": "AAVE-USDT", + "AAVEUST": "AAVE-UST", + "ABBCBTC": "ABBC-BTC", + "ABBCUSDT": "ABBC-USDT", + "ACEUSDT": "ACE-USDT", + "ACOINUSDT": "ACOIN-USDT", + "ACTBTC": "ACT-BTC", + "ACTETH": "ACT-ETH", + "ADA3LUSDT": "ADA3L-USDT", + "ADA3SUSDT": "ADA3S-USDT", + "ADABTC": "ADA-BTC", + "ADAKCS": "ADA-KCS", + "ADAUSDC": "ADA-USDC", + "ADAUSDT": "ADA-USDT", + "ADBBTC": "ADB-BTC", + "ADBETH": "ADB-ETH", + "ADXUSDT": "ADX-USDT", + "AERGOBTC": "AERGO-BTC", + "AERGOUSDT": "AERGO-USDT", + "AGIXBTC": "AGIX-BTC", + "AGIXETH": "AGIX-ETH", + "AGIXUSDT": "AGIX-USDT", + "AGLDUSDT": "AGLD-USDT", + "AIONBTC": "AION-BTC", + "AIONETH": "AION-ETH", + "AIOZUSDT": "AIOZ-USDT", + "AIUSDT": "AI-USDT", + "AKROBTC": "AKRO-BTC", + "AKROUSDT": "AKRO-USDT", + "ALBTETH": "ALBT-ETH", + "ALBTUSDT": "ALBT-USDT", + "ALEPHUSDT": "ALEPH-USDT", + "ALGOBTC": "ALGO-BTC", + "ALGOETH": "ALGO-ETH", + "ALGOKCS": "ALGO-KCS", + "ALGOUSDT": "ALGO-USDT", + "ALICEBTC": "ALICE-BTC", + "ALICEETH": "ALICE-ETH", + "ALICEUSDT": "ALICE-USDT", + "ALPACAUSDT": "ALPACA-USDT", + "ALPHABTC": "ALPHA-BTC", + "ALPHAUSDT": "ALPHA-USDT", + "AMBBTC": "AMB-BTC", + "AMBETH": "AMB-ETH", + "AMPLBTC": "AMPL-BTC", + "AMPLETH": "AMPL-ETH", + "AMPLUSDT": "AMPL-USDT", + "ANCUSDT": "ANC-USDT", + "ANCUST": "ANC-UST", + "ANKRBTC": "ANKR-BTC", + "ANKRUSDT": "ANKR-USDT", + "ANTBTC": "ANT-BTC", + "ANTUSDT": "ANT-USDT", + "AOABTC": "AOA-BTC", + "AOAUSDT": "AOA-USDT", + "API3USDT": "API3-USDT", + "APLBTC": "APL-BTC", + "APLUSDT": "APL-USDT", + "ARBTC": "AR-BTC", + "ARKERUSDT": "ARKER-USDT", + "ARPAUSDT": "ARPA-USDT", + "ARRRBTC": "ARRR-BTC", + "ARRRUSDT": "ARRR-USDT", + "ARUSDT": "AR-USDT", + "ARXUSDT": "ARX-USDT", + "ASDUSDT": "ASD-USDT", + "ATABTC": "ATA-BTC", + "ATAUSDT": "ATA-USDT", + "ATOM3LUSDT": "ATOM3L-USDT", + "ATOM3SUSDT": "ATOM3S-USDT", + "ATOMBTC": "ATOM-BTC", + "ATOMETH": "ATOM-ETH", + "ATOMKCS": "ATOM-KCS", + "ATOMUSDT": "ATOM-USDT", + "ATOMUST": "ATOM-UST", + "AUDIOBTC": "AUDIO-BTC", + "AUDIOUSDT": "AUDIO-USDT", + "AURYUSDT": "AURY-USDT", + "AVABTC": "AVA-BTC", + "AVAETH": "AVA-ETH", + "AVAUSDT": "AVA-USDT", + "AVAX3LUSDT": "AVAX3L-USDT", + "AVAX3SUSDT": "AVAX3S-USDT", + "AVAXBTC": "AVAX-BTC", + "AVAXUSDT": "AVAX-USDT", + "AXCUSDT": "AXC-USDT", + "AXPRBTC": "AXPR-BTC", + "AXPRETH": "AXPR-ETH", + "AXS3LUSDT": "AXS3L-USDT", + "AXS3SUSDT": "AXS3S-USDT", + "AXSUSDT": "AXS-USDT", + "BADGERBTC": "BADGER-BTC", + "BADGERUSDT": "BADGER-USDT", + "BAKEBTC": "BAKE-BTC", + "BAKEETH": "BAKE-ETH", + "BAKEUSDT": "BAKE-USDT", + "BALBTC": "BAL-BTC", + "BALETH": "BAL-ETH", + "BALUSDT": "BAL-USDT", + "BANDBTC": "BAND-BTC", + "BANDUSDT": "BAND-USDT", + "BASICUSDT": "BASIC-USDT", + "BATUSDT": "BAT-USDT", + "BAXBTC": "BAX-BTC", + "BAXETH": "BAX-ETH", + "BAXUSDT": "BAX-USDT", + "BCDBTC": "BCD-BTC", + "BCDETH": "BCD-ETH", + "BCH3LUSDT": "BCH3L-USDT", + "BCH3SUSDT": "BCH3S-USDT", + "BCHBTC": "BCH-BTC", + "BCHKCS": "BCH-KCS", + "BCHSVBTC": "BCHSV-BTC", + "BCHSVETH": "BCHSV-ETH", + "BCHSVKCS": "BCHSV-KCS", + "BCHSVUSDC": "BCHSV-USDC", + "BCHSVUSDT": "BCHSV-USDT", + "BCHUSDC": "BCH-USDC", + "BCHUSDT": "BCH-USDT", + "BEPROBTC": "BEPRO-BTC", + "BEPROUSDT": "BEPRO-USDT", + "BLOKUSDT": "BLOK-USDT", + "BMONUSDT": "BMON-USDT", + "BNB3LUSDT": "BNB3L-USDT", + "BNB3SUSDT": "BNB3S-USDT", + "BNBBTC": "BNB-BTC", + 
"BNBKCS": "BNB-KCS", + "BNBUSDT": "BNB-USDT", + "BNSBTC": "BNS-BTC", + "BNSUSDT": "BNS-USDT", + "BNTBTC": "BNT-BTC", + "BNTETH": "BNT-ETH", + "BNTUSDT": "BNT-USDT", + "BOAUSDT": "BOA-USDT", + "BOLTBTC": "BOLT-BTC", + "BOLTUSDT": "BOLT-USDT", + "BONDLYETH": "BONDLY-ETH", + "BONDLYUSDT": "BONDLY-USDT", + "BONDUSDT": "BOND-USDT", + "BOSONETH": "BOSON-ETH", + "BOSONUSDT": "BOSON-USDT", + "BTC3LUSDT": "BTC3L-USDT", + "BTC3SUSDT": "BTC3S-USDT", + "BTCDAI": "BTC-DAI", + "BTCPAX": "BTC-PAX", + "BTCTUSD": "BTC-TUSD", + "BTCUSDC": "BTC-USDC", + "BTCUSDT": "BTC-USDT", + "BTCUST": "BTC-UST", + "BTTBTC": "BTT-BTC", + "BTTETH": "BTT-ETH", + "BTTTRX": "BTT-TRX", + "BTTUSDT": "BTT-USDT", + "BURGERBTC": "BURGER-BTC", + "BURGERUSDT": "BURGER-USDT", + "BURPUSDT": "BURP-USDT", + "BUXBTC": "BUX-BTC", + "BUXUSDT": "BUX-USDT", + "BUYBTC": "BUY-BTC", + "BUYUSDT": "BUY-USDT", + "C98USDT": "C98-USDT", + "CAKEUSDT": "CAKE-USDT", + "CAPPBTC": "CAPP-BTC", + "CAPPETH": "CAPP-ETH", + "CARDUSDT": "CARD-USDT", + "CARRBTC": "CARR-BTC", + "CARRUSDT": "CARR-USDT", + "CASBTC": "CAS-BTC", + "CASUSDT": "CAS-USDT", + "CBCBTC": "CBC-BTC", + "CBCUSDT": "CBC-USDT", + "CELOBTC": "CELO-BTC", + "CELOUSDT": "CELO-USDT", + "CEREUSDT": "CERE-USDT", + "CEURBTC": "CEUR-BTC", + "CEURUSDT": "CEUR-USDT", + "CFGBTC": "CFG-BTC", + "CFGUSDT": "CFG-USDT", + "CGGUSDT": "CGG-USDT", + "CHMBUSDT": "CHMB-USDT", + "CHRBTC": "CHR-BTC", + "CHRUSDT": "CHR-USDT", + "CHSBBTC": "CHSB-BTC", + "CHSBETH": "CHSB-ETH", + "CHZBTC": "CHZ-BTC", + "CHZUSDT": "CHZ-USDT", + "CIRUSETH": "CIRUS-ETH", + "CIRUSUSDT": "CIRUS-USDT", + "CIX100USDT": "CIX100-USDT", + "CKBBTC": "CKB-BTC", + "CKBUSDT": "CKB-USDT", + "CLVUSDT": "CLV-USDT", + "COMBUSDT": "COMB-USDT", + "COMPUSDT": "COMP-USDT", + "COTIBTC": "COTI-BTC", + "COTIUSDT": "COTI-USDT", + "COVBTC": "COV-BTC", + "COVETH": "COV-ETH", + "COVUSDT": "COV-USDT", + "CPCBTC": "CPC-BTC", + "CPCETH": "CPC-ETH", + "CPOOLUSDT": "CPOOL-USDT", + "CQTUSDT": "CQT-USDT", + "CREAMBTC": "CREAM-BTC", + "CREAMUSDT": "CREAM-USDT", + "CREDIUSDT": "CREDI-USDT", + "CROBTC": "CRO-BTC", + "CROUSDT": "CRO-USDT", + "CRPTBTC": "CRPT-BTC", + "CRPTETH": "CRPT-ETH", + "CRPTUSDT": "CRPT-USDT", + "CRVUSDT": "CRV-USDT", + "CSBTC": "CS-BTC", + "CSETH": "CS-ETH", + "CSPBTC": "CSP-BTC", + "CSPETH": "CSP-ETH", + "CTIETH": "CTI-ETH", + "CTIUSDT": "CTI-USDT", + "CTSIBTC": "CTSI-BTC", + "CTSIUSDT": "CTSI-USDT", + "CUDOSBTC": "CUDOS-BTC", + "CUDOSUSDT": "CUDOS-USDT", + "CUSDBTC": "CUSD-BTC", + "CUSDUSDT": "CUSD-USDT", + "CVBTC": "CV-BTC", + "CVCBTC": "CVC-BTC", + "CVETH": "CV-ETH", + "CWARBTC": "CWAR-BTC", + "CWARUSDT": "CWAR-USDT", + "CWSUSDT": "CWS-USDT", + "CXOBTC": "CXO-BTC", + "CXOETH": "CXO-ETH", + "DACCBTC": "DACC-BTC", + "DACCETH": "DACC-ETH", + "DAGBTC": "DAG-BTC", + "DAGETH": "DAG-ETH", + "DAGUSDT": "DAG-USDT", + "DAOUSDT": "DAO-USDT", + "DAPPTBTC": "DAPPT-BTC", + "DAPPTUSDT": "DAPPT-USDT", + "DAPPXUSDT": "DAPPX-USDT", + "DASHBTC": "DASH-BTC", + "DASHETH": "DASH-ETH", + "DASHKCS": "DASH-KCS", + "DASHUSDT": "DASH-USDT", + "DATABTC": "DATA-BTC", + "DATAUSDT": "DATA-USDT", + "DATXBTC": "DATX-BTC", + "DATXETH": "DATX-ETH", + "DCRBTC": "DCR-BTC", + "DCRETH": "DCR-ETH", + "DEGOETH": "DEGO-ETH", + "DEGOUSDT": "DEGO-USDT", + "DENTBTC": "DENT-BTC", + "DENTETH": "DENT-ETH", + "DEROBTC": "DERO-BTC", + "DEROUSDT": "DERO-USDT", + "DEXEBTC": "DEXE-BTC", + "DEXEETH": "DEXE-ETH", + "DEXEUSDT": "DEXE-USDT", + "DFIBTC": "DFI-BTC", + "DFIUSDT": "DFI-USDT", + "DFYNUSDT": "DFYN-USDT", + "DGBBTC": "DGB-BTC", + "DGBETH": "DGB-ETH", + "DGBUSDT": "DGB-USDT", + "DGTXBTC": 
"DGTX-BTC", + "DGTXETH": "DGTX-ETH", + "DIABTC": "DIA-BTC", + "DIAUSDT": "DIA-USDT", + "DINOUSDT": "DINO-USDT", + "DIVIUSDT": "DIVI-USDT", + "DMGUSDT": "DMG-USDT", + "DMTRUSDT": "DMTR-USDT", + "DOCKBTC": "DOCK-BTC", + "DOCKETH": "DOCK-ETH", + "DODOUSDT": "DODO-USDT", + "DOGE3LUSDT": "DOGE3L-USDT", + "DOGE3SUSDT": "DOGE3S-USDT", + "DOGEBTC": "DOGE-BTC", + "DOGEKCS": "DOGE-KCS", + "DOGEUSDC": "DOGE-USDC", + "DOGEUSDT": "DOGE-USDT", + "DORABTC": "DORA-BTC", + "DORAUSDT": "DORA-USDT", + "DOT3LUSDT": "DOT3L-USDT", + "DOT3SUSDT": "DOT3S-USDT", + "DOTBTC": "DOT-BTC", + "DOTKCS": "DOT-KCS", + "DOTUSDT": "DOT-USDT", + "DOTUST": "DOT-UST", + "DPETUSDT": "DPET-USDT", + "DPIUSDT": "DPI-USDT", + "DPRUSDT": "DPR-USDT", + "DREAMSUSDT": "DREAMS-USDT", + "DRGNBTC": "DRGN-BTC", + "DRGNETH": "DRGN-ETH", + "DSLABTC": "DSLA-BTC", + "DSLAUSDT": "DSLA-USDT", + "DVPNUSDT": "DVPN-USDT", + "DYDXUSDT": "DYDX-USDT", + "DYPETH": "DYP-ETH", + "DYPUSDT": "DYP-USDT", + "EDGBTC": "EDG-BTC", + "EDGUSDT": "EDG-USDT", + "EFXBTC": "EFX-BTC", + "EFXUSDT": "EFX-USDT", + "EGLDBTC": "EGLD-BTC", + "EGLDUSDT": "EGLD-USDT", + "ELABTC": "ELA-BTC", + "ELAETH": "ELA-ETH", + "ELAUSDT": "ELA-USDT", + "ELFBTC": "ELF-BTC", + "ELFETH": "ELF-ETH", + "ELONUSDT": "ELON-USDT", + "ENJBTC": "ENJ-BTC", + "ENJETH": "ENJ-ETH", + "ENJUSDT": "ENJ-USDT", + "ENQBTC": "ENQ-BTC", + "ENQUSDT": "ENQ-USDT", + "ENSUSDT": "ENS-USDT", + "EOS3LUSDT": "EOS3L-USDT", + "EOS3SUSDT": "EOS3S-USDT", + "EOSBTC": "EOS-BTC", + "EOSCUSDT": "EOSC-USDT", + "EOSETH": "EOS-ETH", + "EOSKCS": "EOS-KCS", + "EOSUSDC": "EOS-USDC", + "EOSUSDT": "EOS-USDT", + "EPIKUSDT": "EPIK-USDT", + "EPSBTC": "EPS-BTC", + "EPSUSDT": "EPS-USDT", + "EQXBTC": "EQX-BTC", + "EQXUSDT": "EQX-USDT", + "EQZBTC": "EQZ-BTC", + "EQZUSDT": "EQZ-USDT", + "ERGBTC": "ERG-BTC", + "ERGUSDT": "ERG-USDT", + "ERNBTC": "ERN-BTC", + "ERNUSDT": "ERN-USDT", + "ERSDLUSDT": "ERSDL-USDT", + "ETCBTC": "ETC-BTC", + "ETCETH": "ETC-ETH", + "ETCUSDT": "ETC-USDT", + "ETH2ETH": "ETH2-ETH", + "ETH3LUSDT": "ETH3L-USDT", + "ETH3SUSDT": "ETH3S-USDT", + "ETHBTC": "ETH-BTC", + "ETHDAI": "ETH-DAI", + "ETHOBTC": "ETHO-BTC", + "ETHOUSDT": "ETHO-USDT", + "ETHPAX": "ETH-PAX", + "ETHTUSD": "ETH-TUSD", + "ETHUSDC": "ETH-USDC", + "ETHUSDT": "ETH-USDT", + "ETHUST": "ETH-UST", + "ETNBTC": "ETN-BTC", + "ETNETH": "ETN-ETH", + "ETNUSDT": "ETN-USDT", + "EWTBTC": "EWT-BTC", + "EWTKCS": "EWT-KCS", + "EWTUSDT": "EWT-USDT", + "EXRDUSDT": "EXRD-USDT", + "FALCONSUSDT": "FALCONS-USDT", + "FCLETH": "FCL-ETH", + "FCLUSDT": "FCL-USDT", + "FEARUSDT": "FEAR-USDT", + "FETBTC": "FET-BTC", + "FETETH": "FET-ETH", + "FILUSDT": "FIL-USDT", + "FKXBTC": "FKX-BTC", + "FKXETH": "FKX-ETH", + "FKXUSDT": "FKX-USDT", + "FLAMEUSDT": "FLAME-USDT", + "FLOWBTC": "FLOW-BTC", + "FLOWUSDT": "FLOW-USDT", + "FLUXBTC": "FLUX-BTC", + "FLUXUSDT": "FLUX-USDT", + "FLYUSDT": "FLY-USDT", + "FORESTPLUSBTC": "FORESTPLUS-BTC", + "FORESTPLUSUSDT": "FORESTPLUS-USDT", + "FORMETH": "FORM-ETH", + "FORMUSDT": "FORM-USDT", + "FORTHUSDT": "FORTH-USDT", + "FRMUSDT": "FRM-USDT", + "FRONTBTC": "FRONT-BTC", + "FRONTUSDT": "FRONT-USDT", + "FTGUSDT": "FTG-USDT", + "FTM3LUSDT": "FTM3L-USDT", + "FTM3SUSDT": "FTM3S-USDT", + "FTMBTC": "FTM-BTC", + "FTMETH": "FTM-ETH", + "FTMUSDT": "FTM-USDT", + "FTTBTC": "FTT-BTC", + "FTTUSDT": "FTT-USDT", + "FXBTC": "FX-BTC", + "FXETH": "FX-ETH", + "FXSBTC": "FXS-BTC", + "FXSUSDT": "FXS-USDT", + "GAFIUSDT": "GAFI-USDT", + "GALAX3LUSDT": "GALAX3L-USDT", + "GALAX3SUSDT": "GALAX3S-USDT", + "GALAXUSDT": "GALAX-USDT", + "GASBTC": "GAS-BTC", + "GASUSDT": "GAS-USDT", + "GEEQUSDT": 
"GEEQ-USDT", + "GENSUSDT": "GENS-USDT", + "GHSTBTC": "GHST-BTC", + "GHSTUSDT": "GHST-USDT", + "GHXUSDT": "GHX-USDT", + "GLCHUSDT": "GLCH-USDT", + "GLMBTC": "GLM-BTC", + "GLMUSDT": "GLM-USDT", + "GLQBTC": "GLQ-BTC", + "GLQUSDT": "GLQ-USDT", + "GMBBTC": "GMB-BTC", + "GMBETH": "GMB-ETH", + "GMBUSDT": "GMB-USDT", + "GMEEUSDT": "GMEE-USDT", + "GOBTC": "GO-BTC", + "GODSUSDT": "GODS-USDT", + "GOETH": "GO-ETH", + "GOM2BTC": "GOM2-BTC", + "GOM2USDT": "GOM2-USDT", + "GOUSDT": "GO-USDT", + "GOVIBTC": "GOVI-BTC", + "GOVIUSDT": "GOVI-USDT", + "GRINBTC": "GRIN-BTC", + "GRINETH": "GRIN-ETH", + "GRINUSDT": "GRIN-USDT", + "GRTKCS": "GRT-KCS", + "GRTUSDT": "GRT-USDT", + "GSPIUSDT": "GSPI-USDT", + "GTCBTC": "GTC-BTC", + "GTCUSDT": "GTC-USDT", + "H3RO3SUSDT": "H3RO3S-USDT", + "HAIBTC": "HAI-BTC", + "HAIUSDT": "HAI-USDT", + "HAKAUSDT": "HAKA-USDT", + "HAPIUSDT": "HAPI-USDT", + "HARDUSDT": "HARD-USDT", + "HBARBTC": "HBAR-BTC", + "HBARUSDT": "HBAR-USDT", + "HEARTBTC": "HEART-BTC", + "HEARTUSDT": "HEART-USDT", + "HEGICBTC": "HEGIC-BTC", + "HEGICUSDT": "HEGIC-USDT", + "HEROUSDT": "HERO-USDT", + "HORDUSDT": "HORD-USDT", + "HOTCROSSUSDT": "HOTCROSS-USDT", + "HPBBTC": "HPB-BTC", + "HPBETH": "HPB-ETH", + "HTRBTC": "HTR-BTC", + "HTRUSDT": "HTR-USDT", + "HTUSDT": "HT-USDT", + "HYDRAUSDT": "HYDRA-USDT", + "HYVEBTC": "HYVE-BTC", + "HYVEUSDT": "HYVE-USDT", + "ICPBTC": "ICP-BTC", + "ICPUSDT": "ICP-USDT", + "IDEAUSDT": "IDEA-USDT", + "ILAUSDT": "ILA-USDT", + "ILVUSDT": "ILV-USDT", + "IMXUSDT": "IMX-USDT", + "INJBTC": "INJ-BTC", + "INJUSDT": "INJ-USDT", + "IOIUSDT": "IOI-USDT", + "IOSTBTC": "IOST-BTC", + "IOSTETH": "IOST-ETH", + "IOSTUSDT": "IOST-USDT", + "IOTXBTC": "IOTX-BTC", + "IOTXETH": "IOTX-ETH", + "IOTXUSDT": "IOTX-USDT", + "ISPUSDT": "ISP-USDT", + "IXSUSDT": "IXS-USDT", + "JARBTC": "JAR-BTC", + "JARUSDT": "JAR-USDT", + "JASMYUSDT": "JASMY-USDT", + "JSTUSDT": "JST-USDT", + "JUPETH": "JUP-ETH", + "JUPUSDT": "JUP-USDT", + "KAIBTC": "KAI-BTC", + "KAIETH": "KAI-ETH", + "KAIUSDT": "KAI-USDT", + "KARUSDT": "KAR-USDT", + "KATBTC": "KAT-BTC", + "KATUSDT": "KAT-USDT", + "KAVAUSDT": "KAVA-USDT", + "KCSBTC": "KCS-BTC", + "KCSETH": "KCS-ETH", + "KCSUSDT": "KCS-USDT", + "KDABTC": "KDA-BTC", + "KDAUSDT": "KDA-USDT", + "KDONUSDT": "KDON-USDT", + "KEEPBTC": "KEEP-BTC", + "KEEPUSDT": "KEEP-USDT", + "KEYBTC": "KEY-BTC", + "KEYETH": "KEY-ETH", + "KINUSDT": "KIN-USDT", + "KLAYBTC": "KLAY-BTC", + "KLAYUSDT": "KLAY-USDT", + "KLVBTC": "KLV-BTC", + "KLVTRX": "KLV-TRX", + "KLVUSDT": "KLV-USDT", + "KMAUSDT": "KMA-USDT", + "KMDBTC": "KMD-BTC", + "KMDUSDT": "KMD-USDT", + "KNCBTC": "KNC-BTC", + "KNCETH": "KNC-ETH", + "KOKUSDT": "KOK-USDT", + "KOLETH": "KOL-ETH", + "KOLUSDT": "KOL-USDT", + "KONOUSDT": "KONO-USDT", + "KRLBTC": "KRL-BTC", + "KRLUSDT": "KRL-USDT", + "KSMBTC": "KSM-BTC", + "KSMUSDT": "KSM-USDT", + "LABSETH": "LABS-ETH", + "LABSUSDT": "LABS-USDT", + "LACEETH": "LACE-ETH", + "LACEUSDT": "LACE-USDT", + "LAYERBTC": "LAYER-BTC", + "LAYERUSDT": "LAYER-USDT", + "LIKEUSDT": "LIKE-USDT", + "LINABTC": "LINA-BTC", + "LINAUSDT": "LINA-USDT", + "LINK3LUSDT": "LINK3L-USDT", + "LINK3SUSDT": "LINK3S-USDT", + "LINKBTC": "LINK-BTC", + "LINKKCS": "LINK-KCS", + "LINKUSDC": "LINK-USDC", + "LINKUSDT": "LINK-USDT", + "LITBTC": "LIT-BTC", + "LITHETH": "LITH-ETH", + "LITHUSDT": "LITH-USDT", + "LITUSDT": "LIT-USDT", + "LNCHXUSDT": "LNCHX-USDT", + "LOCGUSDT": "LOCG-USDT", + "LOCUSDT": "LOC-USDT", + "LOKIBTC": "LOKI-BTC", + "LOKIETH": "LOKI-ETH", + "LOKIUSDT": "LOKI-USDT", + "LONUSDT": "LON-USDT", + "LOOMBTC": "LOOM-BTC", + "LOOMETH": "LOOM-ETH", + "LPOOLBTC": 
"LPOOL-BTC", + "LPOOLUSDT": "LPOOL-USDT", + "LPTUSDT": "LPT-USDT", + "LRCBTC": "LRC-BTC", + "LRCETH": "LRC-ETH", + "LRCUSDT": "LRC-USDT", + "LSKBTC": "LSK-BTC", + "LSKETH": "LSK-ETH", + "LSSUSDT": "LSS-USDT", + "LTC3LUSDT": "LTC3L-USDT", + "LTC3SUSDT": "LTC3S-USDT", + "LTCBTC": "LTC-BTC", + "LTCETH": "LTC-ETH", + "LTCKCS": "LTC-KCS", + "LTCUSDC": "LTC-USDC", + "LTCUSDT": "LTC-USDT", + "LTOBTC": "LTO-BTC", + "LTOUSDT": "LTO-USDT", + "LTXBTC": "LTX-BTC", + "LTXUSDT": "LTX-USDT", + "LUNA3LUSDT": "LUNA3L-USDT", + "LUNA3SUSDT": "LUNA3S-USDT", + "LUNABTC": "LUNA-BTC", + "LUNAETH": "LUNA-ETH", + "LUNAKCS": "LUNA-KCS", + "LUNAUSDT": "LUNA-USDT", + "LUNAUST": "LUNA-UST", + "LYMBTC": "LYM-BTC", + "LYMETH": "LYM-ETH", + "LYMUSDT": "LYM-USDT", + "LYXEETH": "LYXE-ETH", + "LYXEUSDT": "LYXE-USDT", + "MAHABTC": "MAHA-BTC", + "MAHAUSDT": "MAHA-USDT", + "MAKIBTC": "MAKI-BTC", + "MAKIUSDT": "MAKI-USDT", + "MANA3LUSDT": "MANA3L-USDT", + "MANA3SUSDT": "MANA3S-USDT", + "MANABTC": "MANA-BTC", + "MANAETH": "MANA-ETH", + "MANAUSDT": "MANA-USDT", + "MANBTC": "MAN-BTC", + "MANUSDT": "MAN-USDT", + "MAPBTC": "MAP-BTC", + "MAPUSDT": "MAP-USDT", + "MARSHUSDT": "MARSH-USDT", + "MASKUSDT": "MASK-USDT", + "MATIC3LUSDT": "MATIC3L-USDT", + "MATIC3SUSDT": "MATIC3S-USDT", + "MATICBTC": "MATIC-BTC", + "MATICUSDT": "MATIC-USDT", + "MATICUST": "MATIC-UST", + "MATTERUSDT": "MATTER-USDT", + "MEMUSDT": "MEM-USDT", + "MFTBTC": "MFT-BTC", + "MFTUSDT": "MFT-USDT", + "MHCBTC": "MHC-BTC", + "MHCETH": "MHC-ETH", + "MHCUSDT": "MHC-USDT", + "MIRKCS": "MIR-KCS", + "MIRUSDT": "MIR-USDT", + "MIRUST": "MIR-UST", + "MITXBTC": "MITX-BTC", + "MITXUSDT": "MITX-USDT", + "MKRBTC": "MKR-BTC", + "MKRDAI": "MKR-DAI", + "MKRETH": "MKR-ETH", + "MKRUSDT": "MKR-USDT", + "MLKBTC": "MLK-BTC", + "MLKUSDT": "MLK-USDT", + "MLNBTC": "MLN-BTC", + "MLNUSDT": "MLN-USDT", + "MNETUSDT": "MNET-USDT", + "MNSTUSDT": "MNST-USDT", + "MNWUSDT": "MNW-USDT", + "MODEFIBTC": "MODEFI-BTC", + "MODEFIUSDT": "MODEFI-USDT", + "MONIUSDT": "MONI-USDT", + "MOVRETH": "MOVR-ETH", + "MOVRUSDT": "MOVR-USDT", + "MSWAPBTC": "MSWAP-BTC", + "MSWAPUSDT": "MSWAP-USDT", + "MTLBTC": "MTL-BTC", + "MTLUSDT": "MTL-USDT", + "MTRGUSDT": "MTRG-USDT", + "MTVBTC": "MTV-BTC", + "MTVETH": "MTV-ETH", + "MTVUSDT": "MTV-USDT", + "MVPBTC": "MVP-BTC", + "MVPETH": "MVP-ETH", + "MXCUSDT": "MXC-USDT", + "MXWUSDT": "MXW-USDT", + "NAKAUSDT": "NAKA-USDT", + "NANOBTC": "NANO-BTC", + "NANOETH": "NANO-ETH", + "NANOKCS": "NANO-KCS", + "NANOUSDT": "NANO-USDT", + "NDAUUSDT": "NDAU-USDT", + "NEAR3LUSDT": "NEAR3L-USDT", + "NEAR3SUSDT": "NEAR3S-USDT", + "NEARBTC": "NEAR-BTC", + "NEARUSDT": "NEAR-USDT", + "NEOBTC": "NEO-BTC", + "NEOETH": "NEO-ETH", + "NEOKCS": "NEO-KCS", + "NEOUSDT": "NEO-USDT", + "NFTBUSDT": "NFTB-USDT", + "NFTTRX": "NFT-TRX", + "NFTUSDT": "NFT-USDT", + "NGCUSDT": "NGC-USDT", + "NGLBTC": "NGL-BTC", + "NGLUSDT": "NGL-USDT", + "NGMUSDT": "NGM-USDT", + "NIFUSDT": "NIF-USDT", + "NIMBTC": "NIM-BTC", + "NIMETH": "NIM-ETH", + "NIMUSDT": "NIM-USDT", + "NKNBTC": "NKN-BTC", + "NKNUSDT": "NKN-USDT", + "NMRBTC": "NMR-BTC", + "NMRUSDT": "NMR-USDT", + "NOIABTC": "NOIA-BTC", + "NOIAUSDT": "NOIA-USDT", + "NORDBTC": "NORD-BTC", + "NORDUSDT": "NORD-USDT", + "NRGBTC": "NRG-BTC", + "NRGETH": "NRG-ETH", + "NTVRKUSDC": "NTVRK-USDC", + "NTVRKUSDT": "NTVRK-USDT", + "NUBTC": "NU-BTC", + "NULSBTC": "NULS-BTC", + "NULSETH": "NULS-ETH", + "NUMUSDT": "NUM-USDT", + "NUUSDT": "NU-USDT", + "NWCBTC": "NWC-BTC", + "NWCUSDT": "NWC-USDT", + "OCEANBTC": "OCEAN-BTC", + "OCEANETH": "OCEAN-ETH", + "ODDZUSDT": "ODDZ-USDT", + "OGNBTC": "OGN-BTC", + 
"OGNUSDT": "OGN-USDT", + "OLTBTC": "OLT-BTC", + "OLTETH": "OLT-ETH", + "OMBTC": "OM-BTC", + "OMGBTC": "OMG-BTC", + "OMGETH": "OMG-ETH", + "OMGUSDT": "OMG-USDT", + "OMUSDT": "OM-USDT", + "ONEBTC": "ONE-BTC", + "ONEUSDT": "ONE-USDT", + "ONTBTC": "ONT-BTC", + "ONTETH": "ONT-ETH", + "ONTUSDT": "ONT-USDT", + "OOEUSDT": "OOE-USDT", + "OPCTBTC": "OPCT-BTC", + "OPCTETH": "OPCT-ETH", + "OPCTUSDT": "OPCT-USDT", + "OPULUSDT": "OPUL-USDT", + "ORAIUSDT": "ORAI-USDT", + "ORBSBTC": "ORBS-BTC", + "ORBSUSDT": "ORBS-USDT", + "ORNUSDT": "ORN-USDT", + "OUSDBTC": "OUSD-BTC", + "OUSDUSDT": "OUSD-USDT", + "OXTBTC": "OXT-BTC", + "OXTETH": "OXT-ETH", + "OXTUSDT": "OXT-USDT", + "PAXGBTC": "PAXG-BTC", + "PAXGUSDT": "PAXG-USDT", + "PBRUSDT": "PBR-USDT", + "PBXUSDT": "PBX-USDT", + "PCXBTC": "PCX-BTC", + "PCXUSDT": "PCX-USDT", + "PDEXBTC": "PDEX-BTC", + "PDEXUSDT": "PDEX-USDT", + "PELUSDT": "PEL-USDT", + "PERPBTC": "PERP-BTC", + "PERPUSDT": "PERP-USDT", + "PHAETH": "PHA-ETH", + "PHAUSDT": "PHA-USDT", + "PHNXBTC": "PHNX-BTC", + "PHNXUSDT": "PHNX-USDT", + "PIVXBTC": "PIVX-BTC", + "PIVXETH": "PIVX-ETH", + "PIVXUSDT": "PIVX-USDT", + "PLAYBTC": "PLAY-BTC", + "PLAYETH": "PLAY-ETH", + "PLUUSDT": "PLU-USDT", + "PMONUSDT": "PMON-USDT", + "PNTBTC": "PNT-BTC", + "PNTUSDT": "PNT-USDT", + "POLCUSDT": "POLC-USDT", + "POLKBTC": "POLK-BTC", + "POLKUSDT": "POLK-USDT", + "POLSBTC": "POLS-BTC", + "POLSUSDT": "POLS-USDT", + "POLUSDT": "POL-USDT", + "POLXUSDT": "POLX-USDT", + "PONDBTC": "POND-BTC", + "PONDUSDT": "POND-USDT", + "POWRBTC": "POWR-BTC", + "POWRETH": "POWR-ETH", + "PPTBTC": "PPT-BTC", + "PPTETH": "PPT-ETH", + "PREBTC": "PRE-BTC", + "PREUSDT": "PRE-USDT", + "PROMBTC": "PROM-BTC", + "PROMUSDT": "PROM-USDT", + "PRQUSDT": "PRQ-USDT", + "PUNDIXBTC": "PUNDIX-BTC", + "PUNDIXUSDT": "PUNDIX-USDT", + "PUSHBTC": "PUSH-BTC", + "PUSHUSDT": "PUSH-USDT", + "PYRBTC": "PYR-BTC", + "PYRUSDT": "PYR-USDT", + "QIBTC": "QI-BTC", + "QIUSDT": "QI-USDT", + "QKCBTC": "QKC-BTC", + "QKCETH": "QKC-ETH", + "QNTUSDT": "QNT-USDT", + "QRDOETH": "QRDO-ETH", + "QRDOUSDT": "QRDO-USDT", + "QTUMBTC": "QTUM-BTC", + "QUICKBTC": "QUICK-BTC", + "QUICKUSDT": "QUICK-USDT", + "RBTCBTC": "RBTC-BTC", + "REAPUSDT": "REAP-USDT", + "REEFBTC": "REEF-BTC", + "REEFUSDT": "REEF-USDT", + "RENUSDT": "REN-USDT", + "REPBTC": "REP-BTC", + "REPETH": "REP-ETH", + "REPUSDT": "REP-USDT", + "REQBTC": "REQ-BTC", + "REQETH": "REQ-ETH", + "REQUSDT": "REQ-USDT", + "REVVBTC": "REVV-BTC", + "REVVUSDT": "REVV-USDT", + "RFOXUSDT": "RFOX-USDT", + "RFUELUSDT": "RFUEL-USDT", + "RIFBTC": "RIF-BTC", + "RLCBTC": "RLC-BTC", + "RLCUSDT": "RLC-USDT", + "RLYUSDT": "RLY-USDT", + "RMRKUSDT": "RMRK-USDT", + "RNDRBTC": "RNDR-BTC", + "RNDRUSDT": "RNDR-USDT", + "ROOBEEBTC": "ROOBEE-BTC", + "ROSEUSDT": "ROSE-USDT", + "ROSNUSDT": "ROSN-USDT", + "ROUTEUSDT": "ROUTE-USDT", + "RSRBTC": "RSR-BTC", + "RSRUSDT": "RSR-USDT", + "RUNEBTC": "RUNE-BTC", + "RUNEUSDT": "RUNE-USDT", + "RUSDT": "R-USDT", + "SAND3LUSDT": "SAND3L-USDT", + "SAND3SUSDT": "SAND3S-USDT", + "SANDUSDT": "SAND-USDT", + "SCLPBTC": "SCLP-BTC", + "SCLPUSDT": "SCLP-USDT", + "SDAOETH": "SDAO-ETH", + "SDAOUSDT": "SDAO-USDT", + "SDNETH": "SDN-ETH", + "SDNUSDT": "SDN-USDT", + "SENSOBTC": "SENSO-BTC", + "SENSOUSDT": "SENSO-USDT", + "SFPBTC": "SFP-BTC", + "SFPUSDT": "SFP-USDT", + "SFUNDUSDT": "SFUND-USDT", + "SHABTC": "SHA-BTC", + "SHAUSDT": "SHA-USDT", + "SHFTBTC": "SHFT-BTC", + "SHFTUSDT": "SHFT-USDT", + "SHIBDOGE": "SHIB-DOGE", + "SHIBUSDT": "SHIB-USDT", + "SHILLUSDT": "SHILL-USDT", + "SHRBTC": "SHR-BTC", + "SHRUSDT": "SHR-USDT", + "SKEYUSDT": "SKEY-USDT", + 
"SKLBTC": "SKL-BTC", + "SKLUSDT": "SKL-USDT", + "SKUBTC": "SKU-BTC", + "SKUUSDT": "SKU-USDT", + "SLIMUSDT": "SLIM-USDT", + "SLPUSDT": "SLP-USDT", + "SNTBTC": "SNT-BTC", + "SNTETH": "SNT-ETH", + "SNTVTBTC": "SNTVT-BTC", + "SNTVTETH": "SNTVT-ETH", + "SNXBTC": "SNX-BTC", + "SNXETH": "SNX-ETH", + "SNXUSDT": "SNX-USDT", + "SNXUST": "SNX-UST", + "SOL3LUSDT": "SOL3L-USDT", + "SOL3SUSDT": "SOL3S-USDT", + "SOLRUSDT": "SOLR-USDT", + "SOLUSDT": "SOL-USDT", + "SOLUST": "SOL-UST", + "SOLVEBTC": "SOLVE-BTC", + "SOLVEUSDT": "SOLVE-USDT", + "SOULBTC": "SOUL-BTC", + "SOULETH": "SOUL-ETH", + "SOULUSDT": "SOUL-USDT", + "SOVUSDT": "SOV-USDT", + "SPIUSDT": "SPI-USDT", + "SRKBTC": "SRK-BTC", + "SRKUSDT": "SRK-USDT", + "SRMBTC": "SRM-BTC", + "SRMUSDT": "SRM-USDT", + "STCBTC": "STC-BTC", + "STCUSDT": "STC-USDT", + "STMXUSDT": "STMX-USDT", + "STNDETH": "STND-ETH", + "STNDUSDT": "STND-USDT", + "STORJBTC": "STORJ-BTC", + "STORJETH": "STORJ-ETH", + "STORJUSDT": "STORJ-USDT", + "STRKBTC": "STRK-BTC", + "STRKETH": "STRK-ETH", + "STRONGUSDT": "STRONG-USDT", + "STXBTC": "STX-BTC", + "STXUSDT": "STX-USDT", + "SUKUBTC": "SUKU-BTC", + "SUKUUSDT": "SUKU-USDT", + "SUNUSDT": "SUN-USDT", + "SUPERBTC": "SUPER-BTC", + "SUPERUSDT": "SUPER-USDT", + "SUSDBTC": "SUSD-BTC", + "SUSDETH": "SUSD-ETH", + "SUSDUSDT": "SUSD-USDT", + "SUSHI3LUSDT": "SUSHI3L-USDT", + "SUSHI3SUSDT": "SUSHI3S-USDT", + "SUSHIUSDT": "SUSHI-USDT", + "SUTERBTC": "SUTER-BTC", + "SUTERUSDT": "SUTER-USDT", + "SWASHUSDT": "SWASH-USDT", + "SWINGBYBTC": "SWINGBY-BTC", + "SWINGBYUSDT": "SWINGBY-USDT", + "SWPUSDT": "SWP-USDT", + "SXPBTC": "SXP-BTC", + "SXPUSDT": "SXP-USDT", + "SYLOUSDT": "SYLO-USDT", + "TARAETH": "TARA-ETH", + "TARAUSDT": "TARA-USDT", + "TCPUSDT": "TCP-USDT", + "TELBTC": "TEL-BTC", + "TELETH": "TEL-ETH", + "TELUSDT": "TEL-USDT", + "THETAUSDT": "THETA-USDT", + "TIDALUSDT": "TIDAL-USDT", + "TIMEBTC": "TIME-BTC", + "TIMEETH": "TIME-ETH", + "TKOBTC": "TKO-BTC", + "TKOUSDT": "TKO-USDT", + "TKYBTC": "TKY-BTC", + "TKYETH": "TKY-ETH", + "TKYUSDT": "TKY-USDT", + "TLMBTC": "TLM-BTC", + "TLMETH": "TLM-ETH", + "TLMUSDT": "TLM-USDT", + "TLOSBTC": "TLOS-BTC", + "TLOSUSDT": "TLOS-USDT", + "TOKOBTC": "TOKO-BTC", + "TOKOKCS": "TOKO-KCS", + "TOKOUSDT": "TOKO-USDT", + "TOMOBTC": "TOMO-BTC", + "TOMOETH": "TOMO-ETH", + "TOMOUSDT": "TOMO-USDT", + "TONEBTC": "TONE-BTC", + "TONEETH": "TONE-ETH", + "TONEUSDT": "TONE-USDT", + "TOWERBTC": "TOWER-BTC", + "TOWERUSDT": "TOWER-USDT", + "TRACBTC": "TRAC-BTC", + "TRACETH": "TRAC-ETH", + "TRADEBTC": "TRADE-BTC", + "TRADEUSDT": "TRADE-USDT", + "TRBBTC": "TRB-BTC", + "TRBUSDT": "TRB-USDT", + "TRIASBTC": "TRIAS-BTC", + "TRIASUSDT": "TRIAS-USDT", + "TRIBEUSDT": "TRIBE-USDT", + "TRUBTC": "TRU-BTC", + "TRUUSDT": "TRU-USDT", + "TRVLUSDT": "TRVL-USDT", + "TRXBTC": "TRX-BTC", + "TRXETH": "TRX-ETH", + "TRXKCS": "TRX-KCS", + "TRXUSDT": "TRX-USDT", + "TVKBTC": "TVK-BTC", + "TVKUSDT": "TVK-USDT", + "TWTBTC": "TWT-BTC", + "TWTUSDT": "TWT-USDT", + "TXAUSDC": "TXA-USDC", + "TXAUSDT": "TXA-USDT", + "UBXETH": "UBX-ETH", + "UBXTUSDT": "UBXT-USDT", + "UBXUSDT": "UBX-USDT", + "UDOOETH": "UDOO-ETH", + "UFOUSDT": "UFO-USDT", + "UMAUSDT": "UMA-USDT", + "UMBUSDT": "UMB-USDT", + "UNBUSDT": "UNB-USDT", + "UNFIUSDT": "UNFI-USDT", + "UNI3LUSDT": "UNI3L-USDT", + "UNI3SUSDT": "UNI3S-USDT", + "UNICUSDT": "UNIC-USDT", + "UNIKCS": "UNI-KCS", + "UNIUSDT": "UNI-USDT", + "UNOBTC": "UNO-BTC", + "UNOUSDT": "UNO-USDT", + "UOSBTC": "UOS-BTC", + "UOSUSDT": "UOS-USDT", + "UQCBTC": "UQC-BTC", + "UQCETH": "UQC-ETH", + "USDCUSDT": "USDC-USDT", + "USDCUST": "USDC-UST", + "USDJUSDT": 
"USDJ-USDT", + "USDNUSDT": "USDN-USDT", + "USDTDAI": "USDT-DAI", + "USDTPAX": "USDT-PAX", + "USDTTUSD": "USDT-TUSD", + "USDTUSDC": "USDT-USDC", + "USDTUST": "USDT-UST", + "UTKBTC": "UTK-BTC", + "UTKETH": "UTK-ETH", + "VAIUSDT": "VAI-USDT", + "VEEDBTC": "VEED-BTC", + "VEEDUSDT": "VEED-USDT", + "VEGAETH": "VEGA-ETH", + "VEGAUSDT": "VEGA-USDT", + "VELOUSDT": "VELO-USDT", + "VET3LUSDT": "VET3L-USDT", + "VET3SUSDT": "VET3S-USDT", + "VETBTC": "VET-BTC", + "VETETH": "VET-ETH", + "VETKCS": "VET-KCS", + "VETUSDT": "VET-USDT", + "VIDBTC": "VID-BTC", + "VIDTBTC": "VIDT-BTC", + "VIDTUSDT": "VIDT-USDT", + "VIDUSDT": "VID-USDT", + "VLXBTC": "VLX-BTC", + "VLXUSDT": "VLX-USDT", + "VRABTC": "VRA-BTC", + "VRAUSDT": "VRA-USDT", + "VRUSDT": "VR-USDT", + "VSYSBTC": "VSYS-BTC", + "VSYSUSDT": "VSYS-USDT", + "VXVUSDT": "VXV-USDT", + "WANBTC": "WAN-BTC", + "WANETH": "WAN-ETH", + "WAVESBTC": "WAVES-BTC", + "WAVESUSDT": "WAVES-USDT", + "WAXBTC": "WAX-BTC", + "WAXETH": "WAX-ETH", + "WAXUSDT": "WAX-USDT", + "WBTCBTC": "WBTC-BTC", + "WBTCETH": "WBTC-ETH", + "WESTBTC": "WEST-BTC", + "WESTUSDT": "WEST-USDT", + "WILDUSDT": "WILD-USDT", + "WINBTC": "WIN-BTC", + "WINTRX": "WIN-TRX", + "WINUSDT": "WIN-USDT", + "WNCGBTC": "WNCG-BTC", + "WNCGUSDT": "WNCG-USDT", + "WNXMBTC": "WNXM-BTC", + "WNXMUSDT": "WNXM-USDT", + "WOMUSDT": "WOM-USDT", + "WOOUSDT": "WOO-USDT", + "WRXBTC": "WRX-BTC", + "WRXUSDT": "WRX-USDT", + "WSIENNAUSDT": "WSIENNA-USDT", + "WTCBTC": "WTC-BTC", + "WXTBTC": "WXT-BTC", + "WXTUSDT": "WXT-USDT", + "XAVAUSDT": "XAVA-USDT", + "XCADUSDT": "XCAD-USDT", + "XCHUSDT": "XCH-USDT", + "XCURBTC": "XCUR-BTC", + "XCURUSDT": "XCUR-USDT", + "XDBBTC": "XDB-BTC", + "XDBUSDT": "XDB-USDT", + "XDCBTC": "XDC-BTC", + "XDCETH": "XDC-ETH", + "XDCUSDT": "XDC-USDT", + "XECUSDT": "XEC-USDT", + "XEDBTC": "XED-BTC", + "XEDUSDT": "XED-USDT", + "XEMBTC": "XEM-BTC", + "XEMUSDT": "XEM-USDT", + "XHVBTC": "XHV-BTC", + "XHVUSDT": "XHV-USDT", + "XLMBTC": "XLM-BTC", + "XLMETH": "XLM-ETH", + "XLMKCS": "XLM-KCS", + "XLMUSDT": "XLM-USDT", + "XMRBTC": "XMR-BTC", + "XMRETH": "XMR-ETH", + "XMRUSDT": "XMR-USDT", + "XNLUSDT": "XNL-USDT", + "XPRBTC": "XPR-BTC", + "XPRTUSDT": "XPRT-USDT", + "XPRUSDT": "XPR-USDT", + "XRP3LUSDT": "XRP3L-USDT", + "XRP3SUSDT": "XRP3S-USDT", + "XRPBTC": "XRP-BTC", + "XRPETH": "XRP-ETH", + "XRPKCS": "XRP-KCS", + "XRPPAX": "XRP-PAX", + "XRPTUSD": "XRP-TUSD", + "XRPUSDC": "XRP-USDC", + "XRPUSDT": "XRP-USDT", + "XSRUSDT": "XSR-USDT", + "XTAGUSDT": "XTAG-USDT", + "XTMUSDT": "XTM-USDT", + "XTZBTC": "XTZ-BTC", + "XTZKCS": "XTZ-KCS", + "XTZUSDT": "XTZ-USDT", + "XVSBTC": "XVS-BTC", + "XVSUSDT": "XVS-USDT", + "XYMBTC": "XYM-BTC", + "XYMUSDT": "XYM-USDT", + "XYOBTC": "XYO-BTC", + "XYOETH": "XYO-ETH", + "XYOUSDT": "XYO-USDT", + "YFDAIBTC": "YFDAI-BTC", + "YFDAIUSDT": "YFDAI-USDT", + "YFIUSDT": "YFI-USDT", + "YFIUST": "YFI-UST", + "YGGUSDT": "YGG-USDT", + "YLDUSDT": "YLD-USDT", + "YOPETH": "YOP-ETH", + "YOPUSDT": "YOP-USDT", + "ZCXBTC": "ZCX-BTC", + "ZCXUSDT": "ZCX-USDT", + "ZECBTC": "ZEC-BTC", + "ZECKCS": "ZEC-KCS", + "ZECUSDT": "ZEC-USDT", + "ZEEUSDT": "ZEE-USDT", + "ZENUSDT": "ZEN-USDT", + "ZILBTC": "ZIL-BTC", + "ZILETH": "ZIL-ETH", + "ZILUSDT": "ZIL-USDT", + "ZKTUSDT": "ZKT-USDT", + "ZORTUSDT": "ZORT-USDT", + "ZRXBTC": "ZRX-BTC", + "ZRXETH": "ZRX-ETH", +} + +func toLocalSymbol(symbol string) string { + s, ok := symbolMap[symbol] + if ok { + return s + } + + return symbol +} diff --git a/pkg/exchange/kucoin/testdata/ack.json b/pkg/exchange/kucoin/testdata/ack.json new file mode 100644 index 0000000000..c1a49e590c --- /dev/null +++ 
b/pkg/exchange/kucoin/testdata/ack.json @@ -0,0 +1,4 @@ +{ + "id": "1640198781304", + "type": "ack" +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/btc-01-account-balance.json b/pkg/exchange/kucoin/testdata/btc-01-account-balance.json new file mode 100644 index 0000000000..ea006c06c1 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/btc-01-account-balance.json @@ -0,0 +1,24 @@ +{ + "id": "61c3728cfd0c3c0001a16a64", + "type": "message", + "topic": "/account/balance", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "account.balance", + "data": { + "accountId": "61b48b6d94ab8d000103ea77", + "available": "42.240598061678", + "availableChange": "-14.099267307125", + "currency": "USDT", + "hold": "14.099267307125", + "holdChange": "14.099267307125", + "relationContext": { + "symbol": "BTC-USDT", + "orderId": "61c3728cfd0c3c0001a16a62" + }, + "relationEvent": "trade.hold", + "relationEventId": "61c3728cfd0c3c0001a16a64", + "time": "1640198796182", + "total": "56.339865368803" + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/btc-02-trade-orders.json b/pkg/exchange/kucoin/testdata/btc-02-trade-orders.json new file mode 100644 index 0000000000..8c8f6f5dcb --- /dev/null +++ b/pkg/exchange/kucoin/testdata/btc-02-trade-orders.json @@ -0,0 +1,25 @@ +{ + "type": "message", + "topic": "/spotMarket/tradeOrders", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "orderChange", + "data": { + "symbol": "BTC-USDT", + "orderType": "limit", + "side": "buy", + "orderId": "61c3728cfd0c3c0001a16a62", + "liquidity": "taker", + "type": "match", + "orderTime": 1640198796191168550, + "size": "0.00028975", + "filledSize": "0.00028975", + "price": "48611.5", + "matchPrice": "48604.5", + "matchSize": "0.00028975", + "tradeId": "61c3728c2e113d2923db40a3", + "remainSize": "0", + "status": "match", + "ts": 1640198796191168550 + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/btc-03-trade-orders.json b/pkg/exchange/kucoin/testdata/btc-03-trade-orders.json new file mode 100644 index 0000000000..9e03249126 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/btc-03-trade-orders.json @@ -0,0 +1,21 @@ +{ + "type": "message", + "topic": "/spotMarket/tradeOrders", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "orderChange", + "data": { + "symbol": "BTC-USDT", + "orderType": "limit", + "side": "buy", + "orderId": "61c3728cfd0c3c0001a16a62", + "type": "filled", + "orderTime": 1640198796191168550, + "size": "0.00028975", + "filledSize": "0.00028975", + "price": "48611.5", + "remainSize": "0", + "status": "done", + "ts": 1640198796191168550 + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/btc-04-account-balance.json b/pkg/exchange/kucoin/testdata/btc-04-account-balance.json new file mode 100644 index 0000000000..7161044280 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/btc-04-account-balance.json @@ -0,0 +1,25 @@ +{ + "id": "61c3728c47d4ea0001c2238a", + "type": "message", + "topic": "/account/balance", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "account.balance", + "data": { + "accountId": "61c1fc287de2940001bd2aac", + "available": "0.00028975", + "availableChange": "0.00028975", + "currency": "BTC", + "hold": "0", + "holdChange": "0", + "relationContext": { + "symbol": "BTC-USDT", + "orderId": "61c3728cfd0c3c0001a16a62", + "tradeId": "61c3728c2e113d2923db40a3" + }, + "relationEvent": 
"trade.setted", + "relationEventId": "61c3728c47d4ea0001c2238a", + "time": "1640198796230", + "total": "0.00028975" + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-01-account-balance.json b/pkg/exchange/kucoin/testdata/cro-01-account-balance.json new file mode 100644 index 0000000000..6928a1151c --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-01-account-balance.json @@ -0,0 +1,24 @@ +{ + "id": "61c3f702e5edc90001b0b581", + "type": "message", + "topic": "/account/balance", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "account.balance", + "data": { + "accountId": "61b48b6d94ab8d000103ea77", + "available": "0.000036536828", + "availableChange": "-56.3116335782", + "currency": "USDT", + "hold": "56.3116335782", + "holdChange": "56.3116335782", + "relationContext": { + "symbol": "CRO-USDT", + "orderId": "61c3f702e5edc90001b0b575" + }, + "relationEvent": "trade.hold", + "relationEventId": "61c3f702e5edc90001b0b581", + "time": "1640232706413", + "total": "56.311670115028" + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-02-trade-orders.json b/pkg/exchange/kucoin/testdata/cro-02-trade-orders.json new file mode 100644 index 0000000000..16d918a0ec --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-02-trade-orders.json @@ -0,0 +1,21 @@ +{ + "type": "message", + "topic": "/spotMarket/tradeOrders", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "orderChange", + "data": { + "symbol": "CRO-USDT", + "orderType": "limit", + "side": "buy", + "orderId": "61c3f702e5edc90001b0b575", + "type": "open", + "orderTime": 1640232706419104233, + "size": "104.5639", + "filledSize": "0", + "price": "0.538", + "remainSize": "104.5639", + "status": "open", + "ts": 1640232706419104233 + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-03-trade-orders.json b/pkg/exchange/kucoin/testdata/cro-03-trade-orders.json new file mode 100644 index 0000000000..778ba89907 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-03-trade-orders.json @@ -0,0 +1,25 @@ +{ + "type": "message", + "topic": "/spotMarket/tradeOrders", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "orderChange", + "data": { + "symbol": "CRO-USDT", + "orderType": "limit", + "side": "buy", + "orderId": "61c3f702e5edc90001b0b575", + "liquidity": "maker", + "type": "match", + "orderTime": 1640232706419104233, + "size": "104.5639", + "filledSize": "104.5639", + "price": "0.538", + "matchPrice": "0.538", + "matchSize": "104.5639", + "tradeId": "61c3f7107857782458a39b06", + "remainSize": "0", + "status": "open", + "ts": 1640232720266477485 + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-04-trade-orders.json b/pkg/exchange/kucoin/testdata/cro-04-trade-orders.json new file mode 100644 index 0000000000..200311cef3 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-04-trade-orders.json @@ -0,0 +1,21 @@ +{ + "type": "message", + "topic": "/spotMarket/tradeOrders", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "orderChange", + "data": { + "symbol": "CRO-USDT", + "orderType": "limit", + "side": "buy", + "orderId": "61c3f702e5edc90001b0b575", + "type": "filled", + "orderTime": 1640232706419104233, + "size": "104.5639", + "filledSize": "104.5639", + "price": "0.538", + "remainSize": "0", + "status": "done", + "ts": 1640232720266477485 + } +} \ No newline at end of file diff --git 
a/pkg/exchange/kucoin/testdata/cro-05-account-balance.json b/pkg/exchange/kucoin/testdata/cro-05-account-balance.json new file mode 100644 index 0000000000..c05bed4837 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-05-account-balance.json @@ -0,0 +1,25 @@ +{ + "id": "61c3f710506791000143eeef", + "type": "message", + "topic": "/account/balance", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "account.balance", + "data": { + "accountId": "61b48b6d94ab8d000103ea77", + "available": "0.000036536828", + "availableChange": "0", + "currency": "USDT", + "hold": "0", + "holdChange": "-56.3116335782", + "relationContext": { + "symbol": "CRO-USDT", + "orderId": "61c3f702e5edc90001b0b575", + "tradeId": "61c3f7107857782458a39b06" + }, + "relationEvent": "trade.setted", + "relationEventId": "61c3f710506791000143eeef", + "time": "1640232720319", + "total": "0.000036536828" + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-06-account-balance.json b/pkg/exchange/kucoin/testdata/cro-06-account-balance.json new file mode 100644 index 0000000000..446dafc8a2 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-06-account-balance.json @@ -0,0 +1,25 @@ +{ + "id": "61c3f710506791000143eeee", + "type": "message", + "topic": "/account/balance", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "account.balance", + "data": { + "accountId": "61c3f710e5756100011faf58", + "available": "104.5639", + "availableChange": "104.5639", + "currency": "CRO", + "hold": "0", + "holdChange": "0", + "relationContext": { + "symbol": "CRO-USDT", + "orderId": "61c3f702e5edc90001b0b575", + "tradeId": "61c3f7107857782458a39b06" + }, + "relationEvent": "trade.setted", + "relationEventId": "61c3f710506791000143eeee", + "time": "1640232720329", + "total": "104.5639" + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-07-account-balance.json b/pkg/exchange/kucoin/testdata/cro-07-account-balance.json new file mode 100644 index 0000000000..153cd3b3d3 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-07-account-balance.json @@ -0,0 +1,24 @@ +{ + "id": "61c3f71ce5edc90001b10686", + "type": "message", + "topic": "/account/balance", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "account.balance", + "data": { + "accountId": "61c3f710e5756100011faf58", + "available": "0", + "availableChange": "-104.5639", + "currency": "CRO", + "hold": "104.5639", + "holdChange": "104.5639", + "relationContext": { + "symbol": "CRO-USDT", + "orderId": "61c3f71ce5edc90001b10685" + }, + "relationEvent": "trade.hold", + "relationEventId": "61c3f71ce5edc90001b10686", + "time": "1640232732749", + "total": "104.5639" + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-08-trade-orders.json b/pkg/exchange/kucoin/testdata/cro-08-trade-orders.json new file mode 100644 index 0000000000..15a815e623 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-08-trade-orders.json @@ -0,0 +1,21 @@ +{ + "type": "message", + "topic": "/spotMarket/tradeOrders", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "orderChange", + "data": { + "symbol": "CRO-USDT", + "orderType": "limit", + "side": "sell", + "orderId": "61c3f71ce5edc90001b10685", + "type": "open", + "orderTime": 1640232732749540684, + "size": "104.5639", + "filledSize": "0", + "price": "0.5382", + "remainSize": "104.5639", + "status": "open", + "ts": 1640232732749540684 + } +} \ No newline at end of file 
diff --git a/pkg/exchange/kucoin/testdata/cro-09-trade-orders.json b/pkg/exchange/kucoin/testdata/cro-09-trade-orders.json new file mode 100644 index 0000000000..ab59319eee --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-09-trade-orders.json @@ -0,0 +1,25 @@ +{ + "type": "message", + "topic": "/spotMarket/tradeOrders", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "orderChange", + "data": { + "symbol": "CRO-USDT", + "orderType": "limit", + "side": "sell", + "orderId": "61c3f71ce5edc90001b10685", + "liquidity": "maker", + "type": "match", + "orderTime": 1640232732749540684, + "size": "104.5639", + "filledSize": "104.5639", + "price": "0.5382", + "matchPrice": "0.5382", + "matchSize": "104.5639", + "tradeId": "61c3f71f7857782458a39b54", + "remainSize": "0", + "status": "open", + "ts": 1640232735930840841 + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-10-trade-orders.json b/pkg/exchange/kucoin/testdata/cro-10-trade-orders.json new file mode 100644 index 0000000000..a04d05259c --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-10-trade-orders.json @@ -0,0 +1,21 @@ +{ + "type": "message", + "topic": "/spotMarket/tradeOrders", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "orderChange", + "data": { + "symbol": "CRO-USDT", + "orderType": "limit", + "side": "sell", + "orderId": "61c3f71ce5edc90001b10685", + "type": "filled", + "orderTime": 1640232732749540684, + "size": "104.5639", + "filledSize": "104.5639", + "price": "0.5382", + "remainSize": "0", + "status": "done", + "ts": 1640232735930840841 + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-11-account-balance.json b/pkg/exchange/kucoin/testdata/cro-11-account-balance.json new file mode 100644 index 0000000000..b4c25491fd --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-11-account-balance.json @@ -0,0 +1,25 @@ +{ + "id": "61c3f71fd5ad710001b5c2a1", + "type": "message", + "topic": "/account/balance", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "account.balance", + "data": { + "accountId": "61b48b6d94ab8d000103ea77", + "available": "56.220051225848", + "availableChange": "56.22001468902", + "currency": "USDT", + "hold": "0", + "holdChange": "0", + "relationContext": { + "symbol": "CRO-USDT", + "orderId": "61c3f71ce5edc90001b10685", + "tradeId": "61c3f71f7857782458a39b54" + }, + "relationEvent": "trade.setted", + "relationEventId": "61c3f71fd5ad710001b5c2a1", + "time": "1640232735979", + "total": "56.220051225848" + } +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/testdata/cro-12-account-balance.json b/pkg/exchange/kucoin/testdata/cro-12-account-balance.json new file mode 100644 index 0000000000..5386f14fae --- /dev/null +++ b/pkg/exchange/kucoin/testdata/cro-12-account-balance.json @@ -0,0 +1,25 @@ +{ + "id": "61c3f71fd5ad710001b5c2a0", + "type": "message", + "topic": "/account/balance", + "userId": "61af6413efeab1000113f08b", + "channelType": "private", + "subject": "account.balance", + "data": { + "accountId": "61c3f710e5756100011faf58", + "available": "0", + "availableChange": "0", + "currency": "CRO", + "hold": "0", + "holdChange": "-104.5639", + "relationContext": { + "symbol": "CRO-USDT", + "orderId": "61c3f71ce5edc90001b10685", + "tradeId": "61c3f71f7857782458a39b54" + }, + "relationEvent": "trade.setted", + "relationEventId": "61c3f71fd5ad710001b5c2a0", + "time": "1640232735982", + "total": "0" + } +} \ No newline at end of file diff --git 
a/pkg/exchange/kucoin/testdata/welcome.json b/pkg/exchange/kucoin/testdata/welcome.json new file mode 100644 index 0000000000..0665f1a119 --- /dev/null +++ b/pkg/exchange/kucoin/testdata/welcome.json @@ -0,0 +1,4 @@ +{ + "id": "TuhpZyeoee", + "type": "welcome" +} \ No newline at end of file diff --git a/pkg/exchange/kucoin/websocket.go b/pkg/exchange/kucoin/websocket.go new file mode 100644 index 0000000000..4ec493aada --- /dev/null +++ b/pkg/exchange/kucoin/websocket.go @@ -0,0 +1,158 @@ +package kucoin + +import ( + "encoding/json" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type WebSocketMessageType string + +const ( + WebSocketMessageTypePing WebSocketMessageType = "ping" + WebSocketMessageTypeSubscribe WebSocketMessageType = "subscribe" + WebSocketMessageTypeUnsubscribe WebSocketMessageType = "unsubscribe" + WebSocketMessageTypeAck WebSocketMessageType = "ack" + WebSocketMessageTypeError WebSocketMessageType = "error" + WebSocketMessageTypePong WebSocketMessageType = "pong" + WebSocketMessageTypeWelcome WebSocketMessageType = "welcome" + WebSocketMessageTypeMessage WebSocketMessageType = "message" +) + +type WebSocketSubject string + +const ( + WebSocketSubjectTradeTicker WebSocketSubject = "trade.ticker" + WebSocketSubjectTradeSnapshot WebSocketSubject = "trade.snapshot" // ticker snapshot + WebSocketSubjectTradeL2Update WebSocketSubject = "trade.l2update" // order book L2 + WebSocketSubjectLevel2 WebSocketSubject = "level2" // level2 + WebSocketSubjectTradeCandlesUpdate WebSocketSubject = "trade.candles.update" + WebSocketSubjectTradeCandlesAdd WebSocketSubject = "trade.candles.add" + + // private subjects + WebSocketSubjectOrderChange WebSocketSubject = "orderChange" + WebSocketSubjectAccountBalance WebSocketSubject = "account.balance" + WebSocketSubjectStopOrder WebSocketSubject = "stopOrder" +) + +type WebSocketCommand struct { + Id int64 `json:"id"` + Type WebSocketMessageType `json:"type"` + Topic string `json:"topic"` + PrivateChannel bool `json:"privateChannel"` + Response bool `json:"response"` +} + +func (c *WebSocketCommand) JSON() ([]byte, error) { + type tt WebSocketCommand + var a = (*tt)(c) + return json.Marshal(a) +} + +type WebSocketEvent struct { + Type WebSocketMessageType `json:"type"` + Topic string `json:"topic"` + Subject WebSocketSubject `json:"subject"` + Data json.RawMessage `json:"data"` + Code int `json:"code"` // used in type error + + // Object is used for storing the parsed Data + Object interface{} `json:"-"` +} + +type WebSocketTickerEvent struct { + Sequence string `json:"sequence"` + Price fixedpoint.Value `json:"price"` + Size fixedpoint.Value `json:"size"` + BestAsk fixedpoint.Value `json:"bestAsk"` + BestAskSize fixedpoint.Value `json:"bestAskSize"` + BestBid fixedpoint.Value `json:"bestBid"` + BestBidSize fixedpoint.Value `json:"bestBidSize"` +} + +type WebSocketOrderBookL2Event struct { + SequenceStart int64 `json:"sequenceStart"` + SequenceEnd int64 `json:"sequenceEnd"` + Symbol string `json:"symbol"` + Changes struct { + Asks types.PriceVolumeSlice `json:"asks"` + Bids types.PriceVolumeSlice `json:"bids"` + } `json:"changes"` +} + +type WebSocketCandleEvent struct { + Symbol string `json:"symbol"` + Candles []string `json:"candles"` + Time types.MillisecondTimestamp `json:"time"` + + // Interval is an injected field (not from the payload) + Interval types.Interval + + // Is a new candle or not + Add bool +} + +func (e *WebSocketCandleEvent) KLine() types.KLine { + startTime := 
types.MustParseUnixTimestamp(e.Candles[0]) + openPrice := fixedpoint.MustNewFromString(e.Candles[1]) + closePrice := fixedpoint.MustNewFromString(e.Candles[2]) + highPrice := fixedpoint.MustNewFromString(e.Candles[3]) + lowPrice := fixedpoint.MustNewFromString(e.Candles[4]) + volume := fixedpoint.MustNewFromString(e.Candles[5]) + quoteVolume := fixedpoint.MustNewFromString(e.Candles[6]) + kline := types.KLine{ + Exchange: types.ExchangeKucoin, + Symbol: toGlobalSymbol(e.Symbol), + StartTime: types.Time(startTime), + EndTime: types.Time(startTime.Add(e.Interval.Duration() - time.Millisecond)), + Interval: e.Interval, + Open: openPrice, + Close: closePrice, + High: highPrice, + Low: lowPrice, + Volume: volume, + QuoteVolume: quoteVolume, + Closed: false, + } + return kline +} + +type WebSocketPrivateOrderEvent struct { + OrderId string `json:"orderId"` + TradeId string `json:"tradeId"` + Symbol string `json:"symbol"` + OrderType string `json:"orderType"` + Side string `json:"side"` + Type string `json:"type"` + OrderTime types.NanosecondTimestamp `json:"orderTime"` + Price fixedpoint.Value `json:"price"` + Size fixedpoint.Value `json:"size"` + FilledSize fixedpoint.Value `json:"filledSize"` + RemainSize fixedpoint.Value `json:"remainSize"` + + Liquidity string `json:"liquidity"` + MatchPrice fixedpoint.Value `json:"matchPrice"` + MatchSize fixedpoint.Value `json:"matchSize"` + ClientOid string `json:"clientOid"` + Status string `json:"status"` + Ts types.MillisecondTimestamp `json:"ts"` +} + +type WebSocketAccountBalanceEvent struct { + Total fixedpoint.Value `json:"total"` + Available fixedpoint.Value `json:"available"` + AvailableChange fixedpoint.Value `json:"availableChange"` + Currency string `json:"currency"` + Hold fixedpoint.Value `json:"hold"` + HoldChange fixedpoint.Value `json:"holdChange"` + RelationEvent string `json:"relationEvent"` + RelationEventId string `json:"relationEventId"` + RelationContext struct { + Symbol string `json:"symbol"` + TradeId string `json:"tradeId"` + OrderId string `json:"orderId"` + } `json:"relationContext"` + Time string `json:"time"` +} diff --git a/pkg/exchange/max/client_order_id.go b/pkg/exchange/max/client_order_id.go new file mode 100644 index 0000000000..89836c6a2b --- /dev/null +++ b/pkg/exchange/max/client_order_id.go @@ -0,0 +1,42 @@ +package max + +import ( + "github.com/c9s/bbgo/pkg/types" + "github.com/google/uuid" +) + +// BBGO is a broker on MAX +const spotBrokerID = "bbgo" + +func NewClientOrderID(originalID string, tags ...string) (clientOrderID string) { + // skip blank client order ID + if originalID == types.NoClientOrderID { + return "" + } + + prefix := "x-" + spotBrokerID + "-" + + for _, tag := range tags { + prefix += tag + "-" + } + + prefixLen := len(prefix) + + if originalID != "" { + // try to keep the whole original client order ID if user specifies it. 
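+		// A rough illustration with hypothetical inputs, assuming the 32-character
+		// client order ID cap enforced below:
+		//   NewClientOrderID("grid-1")  -> "x-bbgo-grid-1"
+		//   an ID too long to fit after the "x-bbgo-" prefix -> returned unchanged
+		//   an empty ID (that is not types.NoClientOrderID) -> "x-bbgo-" + a new UUID, trimmed to 32 chars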
+ if prefixLen+len(originalID) > 32 { + return originalID + } + + clientOrderID = prefix + originalID + return clientOrderID + } + + clientOrderID = uuid.New().String() + clientOrderID = prefix + clientOrderID + if len(clientOrderID) > 32 { + return clientOrderID[0:32] + } + + return clientOrderID +} diff --git a/pkg/exchange/max/convert.go b/pkg/exchange/max/convert.go index 6b38daa8a9..dd1c8b39d3 100644 --- a/pkg/exchange/max/convert.go +++ b/pkg/exchange/max/convert.go @@ -2,14 +2,12 @@ package max import ( "fmt" - "strconv" "strings" "time" "github.com/c9s/bbgo/pkg/exchange/max/maxapi" "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" - "github.com/c9s/bbgo/pkg/util" ) func toGlobalCurrency(currency string) string { @@ -48,31 +46,50 @@ func toGlobalSideType(v string) types.SideType { return types.SideType(v) } -func toGlobalOrderStatus(orderStatus max.OrderState, executedVolume, remainingVolume fixedpoint.Value) types.OrderStatus { +func toGlobalRewards(maxRewards []max.Reward) ([]types.Reward, error) { + // convert to global reward + var rewards []types.Reward + for _, r := range maxRewards { + // ignore "accepted" + if r.State != "done" { + continue + } + + reward, err := r.Reward() + if err != nil { + return nil, err + } + + rewards = append(rewards, *reward) + } + + return rewards, nil +} - switch orderStatus { +func toGlobalOrderStatus(orderState max.OrderState, executedVolume, remainingVolume fixedpoint.Value) types.OrderStatus { + switch orderState { case max.OrderStateCancel: return types.OrderStatusCanceled case max.OrderStateFinalizing, max.OrderStateDone: - if executedVolume > 0 && remainingVolume > 0 { - return types.OrderStatusPartiallyFilled - } else if remainingVolume == 0 { + if executedVolume.IsZero() { + return types.OrderStatusCanceled + } else if remainingVolume.IsZero() { return types.OrderStatusFilled } return types.OrderStatusFilled case max.OrderStateWait: - if executedVolume > 0 && remainingVolume > 0 { + if executedVolume.Sign() > 0 && remainingVolume.Sign() > 0 { return types.OrderStatusPartiallyFilled } return types.OrderStatusNew case max.OrderStateConvert: - if executedVolume > 0 && remainingVolume > 0 { + if executedVolume.Sign() > 0 && remainingVolume.Sign() > 0 { return types.OrderStatusPartiallyFilled } @@ -83,8 +100,8 @@ func toGlobalOrderStatus(orderStatus max.OrderState, executedVolume, remainingVo } - logger.Errorf("unknown order status: %v", orderStatus) - return types.OrderStatus(orderStatus) + log.Errorf("unknown order status: %v", orderState) + return types.OrderStatus(orderState) } func toGlobalOrderType(orderType max.OrderType) types.OrderType { @@ -101,9 +118,15 @@ func toGlobalOrderType(orderType max.OrderType) types.OrderType { case max.OrderTypeStopMarket: return types.OrderTypeStopMarket + case max.OrderTypeIOCLimit: + return types.OrderTypeLimit + + case max.OrderTypePostOnly: + return types.OrderTypeLimitMaker + } - logger.Errorf("unknown order type: %v", orderType) + log.Errorf("order convert error, unknown order type: %v", orderType) return types.OrderType(orderType) } @@ -116,6 +139,9 @@ func toLocalOrderType(orderType types.OrderType) (max.OrderType, error) { case types.OrderTypeStopMarket: return max.OrderTypeStopMarket, nil + case types.OrderTypeLimitMaker: + return max.OrderTypePostOnly, nil + case types.OrderTypeLimit: return max.OrderTypeLimit, nil @@ -140,15 +166,9 @@ func toGlobalOrders(maxOrders []max.Order) (orders []types.Order, err error) { } func toGlobalOrder(maxOrder max.Order) (*types.Order, error) 
{ - executedVolume, err := fixedpoint.NewFromString(maxOrder.ExecutedVolume) - if err != nil { - return nil, err - } - - remainingVolume, err := fixedpoint.NewFromString(maxOrder.RemainingVolume) - if err != nil { - return nil, err - } + executedVolume := maxOrder.ExecutedVolume + remainingVolume := maxOrder.RemainingVolume + isMargin := maxOrder.WalletType == max.WalletTypeMargin return &types.Order{ SubmitOrder: types.SubmitOrder{ @@ -156,82 +176,131 @@ func toGlobalOrder(maxOrder max.Order) (*types.Order, error) { Symbol: toGlobalSymbol(maxOrder.Market), Side: toGlobalSideType(maxOrder.Side), Type: toGlobalOrderType(maxOrder.OrderType), - Quantity: util.MustParseFloat(maxOrder.Volume), - Price: util.MustParseFloat(maxOrder.Price), - TimeInForce: "GTC", // MAX only supports GTC + Quantity: maxOrder.Volume, + Price: maxOrder.Price, + TimeInForce: types.TimeInForceGTC, // MAX only supports GTC GroupID: maxOrder.GroupID, }, - Exchange: types.ExchangeMax.String(), - IsWorking: maxOrder.State == "wait", + Exchange: types.ExchangeMax, + IsWorking: maxOrder.State == max.OrderStateWait, OrderID: maxOrder.ID, Status: toGlobalOrderStatus(maxOrder.State, executedVolume, remainingVolume), - ExecutedQuantity: executedVolume.Float64(), - CreationTime: maxOrder.CreatedAt, - UpdateTime: maxOrder.CreatedAt, + ExecutedQuantity: executedVolume, + CreationTime: types.Time(maxOrder.CreatedAt.Time()), + UpdateTime: types.Time(maxOrder.CreatedAt.Time()), + IsMargin: isMargin, + IsIsolated: false, // isolated margin is not supported }, nil } func toGlobalTrade(t max.Trade) (*types.Trade, error) { + isMargin := t.WalletType == max.WalletTypeMargin + side := toGlobalSideType(t.Side) + return &types.Trade{ + ID: t.ID, + OrderID: t.OrderID, + Price: t.Price, + Symbol: toGlobalSymbol(t.Market), + Exchange: types.ExchangeMax, + Quantity: t.Volume, + Side: side, + IsBuyer: t.IsBuyer(), + IsMaker: t.IsMaker(), + Fee: t.Fee, + FeeCurrency: toGlobalCurrency(t.FeeCurrency), + QuoteQuantity: t.Funds, + Time: types.Time(t.CreatedAt), + IsMargin: isMargin, + IsIsolated: false, + IsFutures: false, + }, nil +} + +func toGlobalDepositStatus(a string) types.DepositStatus { + switch a { + case "submitting", "submitted", "checking": + return types.DepositPending + + case "accepted": + return types.DepositSuccess + + case "rejected": + return types.DepositRejected + + case "canceled": + return types.DepositCancelled + + case "suspect", "refunded": + + } + + return types.DepositStatus(a) +} + +func convertWebSocketTrade(t max.TradeUpdate) (*types.Trade, error) { // skip trade ID that is the same. 
however this should not happen var side = toGlobalSideType(t.Side) // trade time - mts := time.Unix(0, t.CreatedAtMilliSeconds*int64(time.Millisecond)) + mts := time.Unix(0, t.Timestamp*int64(time.Millisecond)) - price, err := strconv.ParseFloat(t.Price, 64) + price, err := fixedpoint.NewFromString(t.Price) if err != nil { return nil, err } - quantity, err := strconv.ParseFloat(t.Volume, 64) + quantity, err := fixedpoint.NewFromString(t.Volume) if err != nil { return nil, err } - quoteQuantity, err := strconv.ParseFloat(t.Funds, 64) - if err != nil { - return nil, err - } + quoteQuantity := price.Mul(quantity) - fee, err := strconv.ParseFloat(t.Fee, 64) + fee, err := fixedpoint.NewFromString(t.Fee) if err != nil { return nil, err } return &types.Trade{ - ID: int64(t.ID), + ID: t.ID, OrderID: t.OrderID, - Price: price, Symbol: toGlobalSymbol(t.Market), - Exchange: "max", + Exchange: types.ExchangeMax, + Price: price, Quantity: quantity, Side: side, - IsBuyer: t.IsBuyer(), - IsMaker: t.IsMaker(), + IsBuyer: side == types.SideTypeBuy, + IsMaker: t.Maker, Fee: fee, FeeCurrency: toGlobalCurrency(t.FeeCurrency), QuoteQuantity: quoteQuantity, - Time: mts, + Time: types.Time(mts), }, nil } -func toGlobalDepositStatus(a string) types.DepositStatus { - switch a { - case "submitting", "submitted", "checking": - return types.DepositPending - - case "accepted": - return types.DepositSuccess - - case "rejected": - return types.DepositRejected - - case "canceled": - return types.DepositCancelled - - case "suspect", "refunded": - +func convertWebSocketOrderUpdate(u max.OrderUpdate) (*types.Order, error) { + timeInForce := types.TimeInForceGTC + if u.OrderType == max.OrderTypeIOCLimit { + timeInForce = types.TimeInForceIOC } - return types.DepositStatus(a) + return &types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: u.ClientOID, + Symbol: toGlobalSymbol(u.Market), + Side: toGlobalSideType(u.Side), + Type: toGlobalOrderType(u.OrderType), + Quantity: u.Volume, + Price: u.Price, + StopPrice: u.StopPrice, + TimeInForce: timeInForce, // MAX only supports GTC + GroupID: u.GroupID, + }, + Exchange: types.ExchangeMax, + OrderID: u.ID, + Status: toGlobalOrderStatus(u.State, u.ExecutedVolume, u.RemainingVolume), + ExecutedQuantity: u.ExecutedVolume, + CreationTime: types.Time(time.Unix(0, u.CreatedAtMs*int64(time.Millisecond))), + UpdateTime: types.Time(time.Unix(0, u.CreatedAtMs*int64(time.Millisecond))), + }, nil } diff --git a/pkg/exchange/max/exchange.go b/pkg/exchange/max/exchange.go index a39faac4e1..79a7076acb 100644 --- a/pkg/exchange/max/exchange.go +++ b/pkg/exchange/max/exchange.go @@ -5,23 +5,37 @@ import ( "fmt" "math" "os" + "sort" + "strconv" "time" - "github.com/google/uuid" "github.com/pkg/errors" "github.com/sirupsen/logrus" + "go.uber.org/multierr" + "golang.org/x/time/rate" maxapi "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + v3 "github.com/c9s/bbgo/pkg/exchange/max/maxapi/v3" "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" - "github.com/c9s/bbgo/pkg/util" ) +// closedOrderQueryLimiter is used for the closed orders query rate limit, 1 request per second +var closedOrderQueryLimiter = rate.NewLimiter(rate.Every(1*time.Second), 1) +var tradeQueryLimiter = rate.NewLimiter(rate.Every(3*time.Second), 1) +var accountQueryLimiter = rate.NewLimiter(rate.Every(3*time.Second), 1) +var marketDataLimiter = rate.NewLimiter(rate.Every(2*time.Second), 10) + var log = logrus.WithField("exchange", "max") type Exchange struct { - client *maxapi.RestClient + types.MarginSettings 
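+	// MarginSettings is embedded so that the request builders below can switch
+	// between the spot wallet and the margin (M) wallet per Exchange instance:
+	// they pick maxapi.WalletTypeMargin whenever IsMargin is set.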
+ key, secret string + client *maxapi.RestClient + + v3order *v3.OrderService + v3margin *v3.MarginService } func New(key, secret string) *Exchange { @@ -35,7 +49,10 @@ func New(key, secret string) *Exchange { return &Exchange{ client: client, key: key, - secret: secret, + // pragma: allowlist nextline secret + secret: secret, + v3order: &v3.OrderService{Client: client}, + v3margin: &v3.MarginService{Client: client}, } } @@ -43,6 +60,70 @@ func (e *Exchange) Name() types.ExchangeName { return types.ExchangeMax } +func (e *Exchange) QueryTicker(ctx context.Context, symbol string) (*types.Ticker, error) { + ticker, err := e.client.PublicService.Ticker(toLocalSymbol(symbol)) + if err != nil { + return nil, err + } + + return &types.Ticker{ + Time: ticker.Time, + Volume: fixedpoint.MustNewFromString(ticker.Volume), + Last: fixedpoint.MustNewFromString(ticker.Last), + Open: fixedpoint.MustNewFromString(ticker.Open), + High: fixedpoint.MustNewFromString(ticker.High), + Low: fixedpoint.MustNewFromString(ticker.Low), + Buy: fixedpoint.MustNewFromString(ticker.Buy), + Sell: fixedpoint.MustNewFromString(ticker.Sell), + }, nil +} + +func (e *Exchange) QueryTickers(ctx context.Context, symbol ...string) (map[string]types.Ticker, error) { + if err := marketDataLimiter.Wait(ctx); err != nil { + return nil, err + } + + var tickers = make(map[string]types.Ticker) + if len(symbol) == 1 { + ticker, err := e.QueryTicker(ctx, symbol[0]) + if err != nil { + return nil, err + } + + tickers[toGlobalSymbol(symbol[0])] = *ticker + } else { + + maxTickers, err := e.client.PublicService.Tickers() + if err != nil { + return nil, err + } + + m := make(map[string]struct{}) + exists := struct{}{} + for _, s := range symbol { + m[toGlobalSymbol(s)] = exists + } + + for k, v := range maxTickers { + if _, ok := m[toGlobalSymbol(k)]; len(symbol) != 0 && !ok { + continue + } + tickers[toGlobalSymbol(k)] = types.Ticker{ + Time: v.Time, + Volume: fixedpoint.MustNewFromString(v.Volume), + Last: fixedpoint.MustNewFromString(v.Last), + Open: fixedpoint.MustNewFromString(v.Open), + High: fixedpoint.MustNewFromString(v.High), + Low: fixedpoint.MustNewFromString(v.Low), + Buy: fixedpoint.MustNewFromString(v.Buy), + Sell: fixedpoint.MustNewFromString(v.Sell), + } + } + } + + return tickers, nil +} + func (e *Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { log.Info("querying market info...") @@ -57,18 +138,22 @@ func (e *Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { market := types.Market{ Symbol: symbol, + LocalSymbol: m.ID, PricePrecision: m.QuoteUnitPrecision, VolumePrecision: m.BaseUnitPrecision, QuoteCurrency: toGlobalCurrency(m.QuoteUnit), BaseCurrency: toGlobalCurrency(m.BaseUnit), MinNotional: m.MinQuoteAmount, MinAmount: m.MinQuoteAmount, - MinLot: 1.0 / math.Pow10(m.BaseUnitPrecision), // make it like 0.0001 - MinQuantity: m.MinBaseAmount, - MaxQuantity: 10000.0, - MinPrice: 1.0 / math.Pow10(m.QuoteUnitPrecision), // used in the price formatter - MaxPrice: 10000.0, - TickSize: 1.0 / math.Pow10(m.QuoteUnitPrecision), + + MinQuantity: m.MinBaseAmount, + MaxQuantity: fixedpoint.NewFromInt(10000), + // make it like 0.0001 + StepSize: fixedpoint.NewFromFloat(1.0 / math.Pow10(m.BaseUnitPrecision)), + // used in the price formatter + MinPrice: fixedpoint.NewFromFloat(1.0 / math.Pow10(m.QuoteUnitPrecision)), + MaxPrice: fixedpoint.NewFromInt(10000), + TickSize: fixedpoint.NewFromFloat(1.0 / math.Pow10(m.QuoteUnitPrecision)), } markets[symbol] = market @@ -78,11 +163,33 @@ func (e 
*Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { } func (e *Exchange) NewStream() types.Stream { - return NewStream(e.key, e.secret) + stream := NewStream(e.key, e.secret) + stream.MarginSettings = e.MarginSettings + return stream +} + +func (e *Exchange) QueryOrder(ctx context.Context, q types.OrderQuery) (*types.Order, error) { + orderID, err := strconv.ParseInt(q.OrderID, 10, 64) + if err != nil { + return nil, err + } + + maxOrder, err := e.v3order.NewGetOrderRequest().Id(uint64(orderID)).Do(ctx) + if err != nil { + return nil, err + } + + return toGlobalOrder(*maxOrder) } func (e *Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders []types.Order, err error) { - maxOrders, err := e.client.OrderService.Open(toLocalSymbol(symbol), maxapi.QueryOrderOptions{}) + market := toLocalSymbol(symbol) + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin + } + + maxOrders, err := e.v3order.NewGetWalletOpenOrdersRequest(walletType).Market(market).Do(ctx) if err != nil { return orders, err } @@ -100,55 +207,57 @@ func (e *Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders [ } // lastOrderID is not supported on MAX -func (e *Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) (orders []types.Order, err error) { - numBatches := 5 - limit := 1000 // max limit = 1000 - offset := limit * numBatches - orderIDs := make(map[uint64]struct{}, limit*2) - - for ; offset > 0; offset -= limit { - log.Infof("querying %s closed orders offset %d ~ ", symbol, offset) - - maxOrders, err := e.client.OrderService.Closed(toLocalSymbol(symbol), maxapi.QueryOrderOptions{ - Offset: offset, - Limit: limit, - }) - if err != nil { - return orders, err - } +func (e *Exchange) QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) ([]types.Order, error) { + log.Warn("!!!MAX EXCHANGE API NOTICE!!!") + log.Warn("the since/until conditions will not be effected on closed orders query, max exchange does not support time-range-based query") + return e.queryClosedOrdersByLastOrderID(ctx, symbol, lastOrderID) +} - if len(maxOrders) == 0 { - break - } +func (e *Exchange) queryClosedOrdersByLastOrderID(ctx context.Context, symbol string, lastOrderID uint64) (orders []types.Order, err error) { + if err := closedOrderQueryLimiter.Wait(ctx); err != nil { + return orders, err + } - for _, maxOrder := range maxOrders { - if maxOrder.CreatedAt.Before(since) { - continue - } + market := toLocalSymbol(symbol) + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin + } - if maxOrder.CreatedAt.After(until) { - return orders, err - } + req := e.v3order.NewGetWalletOrderHistoryRequest(walletType).Market(market) + if lastOrderID == 0 { + lastOrderID = 1 + } - order, err := toGlobalOrder(maxOrder) - if err != nil { - return orders, err - } + req.FromID(lastOrderID) + req.Limit(1000) - if _, ok := orderIDs[order.OrderID]; ok { - log.Infof("skipping duplicated order: %d", order.OrderID) - } + maxOrders, err := req.Do(ctx) + if err != nil { + return orders, err + } - orderIDs[order.OrderID] = struct{}{} - orders = append(orders, *order) + for _, maxOrder := range maxOrders { + order, err2 := toGlobalOrder(maxOrder) + if err2 != nil { + err = multierr.Append(err, err2) + continue } + + orders = append(orders, *order) } - return orders, err + orders = types.SortOrdersAscending(orders) + return 
orders, nil } func (e *Exchange) CancelAllOrders(ctx context.Context) ([]types.Order, error) { - var req = e.client.OrderService.NewOrderCancelAllRequest() + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin + } + + req := e.v3order.NewCancelWalletOrderAllRequest(walletType) var maxOrders, err = req.Do(ctx) if err != nil { return nil, err @@ -158,10 +267,16 @@ func (e *Exchange) CancelAllOrders(ctx context.Context) ([]types.Order, error) { } func (e *Exchange) CancelOrdersBySymbol(ctx context.Context, symbol string) ([]types.Order, error) { - var req = e.client.OrderService.NewOrderCancelAllRequest() - req.Market(toLocalSymbol(symbol)) + market := toLocalSymbol(symbol) + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin + } - var maxOrders, err = req.Do(ctx) + req := e.v3order.NewCancelWalletOrderAllRequest(walletType) + req.Market(market) + + maxOrders, err := req.Do(ctx) if err != nil { return nil, err } @@ -169,11 +284,16 @@ func (e *Exchange) CancelOrdersBySymbol(ctx context.Context, symbol string) ([]t return toGlobalOrders(maxOrders) } -func (e *Exchange) CancelOrdersByGroupID(ctx context.Context, groupID int64) ([]types.Order, error) { - var req = e.client.OrderService.NewOrderCancelAllRequest() +func (e *Exchange) CancelOrdersByGroupID(ctx context.Context, groupID uint32) ([]types.Order, error) { + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin + } + + req := e.v3order.NewCancelWalletOrderAllRequest(walletType) req.GroupID(groupID) - var maxOrders, err = req.Do(ctx) + maxOrders, err := req.Do(ctx) if err != nil { return nil, err } @@ -182,7 +302,12 @@ func (e *Exchange) CancelOrdersByGroupID(ctx context.Context, groupID int64) ([] } func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) (err2 error) { - var groupIDs = make(map[int64]struct{}) + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin + } + + var groupIDs = make(map[uint32]struct{}) var orphanOrders []types.Order for _, o := range orders { if o.GroupID > 0 { @@ -194,7 +319,7 @@ func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) (err if len(groupIDs) > 0 { for groupID := range groupIDs { - var req = e.client.OrderService.NewOrderCancelAllRequest() + req := e.v3order.NewCancelWalletOrderAllRequest(walletType) req.GroupID(groupID) if _, err := req.Do(ctx); err != nil { @@ -205,16 +330,16 @@ func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) (err } for _, o := range orphanOrders { - var req = e.client.OrderService.NewOrderCancelRequest() + req := e.v3order.NewCancelOrderRequest() if o.OrderID > 0 { - req.ID(o.OrderID) - } else if len(o.ClientOrderID) > 0 { + req.Id(o.OrderID) + } else if len(o.ClientOrderID) > 0 && o.ClientOrderID != types.NoClientOrderID { req.ClientOrderID(o.ClientOrderID) } else { return fmt.Errorf("order id or client order id is not defined, order=%+v", o) } - if err := req.Do(ctx); err != nil { + if _, err := req.Do(ctx); err != nil { log.WithError(err).Errorf("order cancel error") err2 = err } @@ -223,43 +348,169 @@ func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) (err return err2 } +func toMaxSubmitOrder(o types.SubmitOrder) (*maxapi.SubmitOrder, error) { + symbol := toLocalSymbol(o.Symbol) + orderType, err := toLocalOrderType(o.Type) + if err != nil { + return nil, err + } + 
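+	// Time-in-force is folded into the MAX order type here: a GTC limit order stays
+	// OrderTypeLimit, an IOC limit order becomes OrderTypeIOCLimit, and
+	// types.OrderTypeLimitMaker was already mapped to OrderTypePostOnly by
+	// toLocalOrderType above.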
+ // case IOC type + if orderType == maxapi.OrderTypeLimit && o.TimeInForce == types.TimeInForceIOC { + orderType = maxapi.OrderTypeIOCLimit + } + + var quantityString string + if o.Market.Symbol != "" { + quantityString = o.Market.FormatQuantity(o.Quantity) + } else { + quantityString = o.Quantity.String() + } + + maxOrder := maxapi.SubmitOrder{ + Market: symbol, + Side: toLocalSideType(o.Side), + OrderType: orderType, + Volume: quantityString, + } + + if o.GroupID > 0 { + maxOrder.GroupID = o.GroupID + } + + clientOrderID := NewClientOrderID(o.ClientOrderID) + if len(clientOrderID) > 0 { + maxOrder.ClientOID = clientOrderID + } + + switch o.Type { + case types.OrderTypeStopLimit, types.OrderTypeLimit, types.OrderTypeLimitMaker: + var priceInString string + if o.Market.Symbol != "" { + priceInString = o.Market.FormatPrice(o.Price) + } else { + priceInString = o.Price.String() + } + maxOrder.Price = priceInString + } + + // set stop price field for limit orders + switch o.Type { + case types.OrderTypeStopLimit, types.OrderTypeStopMarket: + var priceInString string + if o.Market.Symbol != "" { + priceInString = o.Market.FormatPrice(o.StopPrice) + } else { + priceInString = o.StopPrice.String() + } + maxOrder.StopPrice = priceInString + } + + return &maxOrder, nil +} + +func (e *Exchange) Withdraw(ctx context.Context, asset string, amount fixedpoint.Value, address string, options *types.WithdrawalOptions) error { + asset = toLocalCurrency(asset) + + addresses, err := e.client.WithdrawalService.NewGetWithdrawalAddressesRequest(). + Currency(asset). + Do(ctx) + + if err != nil { + return err + } + + var whitelistAddress maxapi.WithdrawalAddress + for _, a := range addresses { + if a.Address == address { + whitelistAddress = a + break + } + } + + if whitelistAddress.Address != address { + return fmt.Errorf("address %s is not in the whitelist", address) + } + + if whitelistAddress.UUID == "" { + return errors.New("address UUID can not be empty") + } + + response, err := e.client.WithdrawalService.NewWithdrawalRequest(). + Currency(asset). + Amount(amount.Float64()). + AddressUUID(whitelistAddress.UUID). + Do(ctx) + + if err != nil { + return err + } + + log.Infof("withdrawal request response: %+v", response) + return nil +} + func (e *Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (createdOrders types.OrderSlice, err error) { - for _, order := range orders { - orderType, err := toLocalOrderType(order.Type) + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin + } + + for _, o := range orders { + orderType, err := toLocalOrderType(o.Type) if err != nil { return createdOrders, err } - req := e.client.OrderService.NewCreateOrderRequest(). - Market(toLocalSymbol(order.Symbol)). - OrderType(string(orderType)). - Side(toLocalSideType(order.Side)). 
- Volume(order.QuantityString) + // case IOC type + if orderType == maxapi.OrderTypeLimit && o.TimeInForce == types.TimeInForceIOC { + orderType = maxapi.OrderTypeIOCLimit + } - if len(order.ClientOrderID) > 0 { - req.ClientOrderID(order.ClientOrderID) + var quantityString string + if o.Market.Symbol != "" { + quantityString = o.Market.FormatQuantity(o.Quantity) } else { - clientOrderID := uuid.New().String() - req.ClientOrderID(clientOrderID) + quantityString = o.Quantity.String() } - switch order.Type { - case types.OrderTypeStopLimit, types.OrderTypeStopMarket: - if len(order.StopPriceString) == 0 { - return createdOrders, fmt.Errorf("stop price string can not be empty") - } + clientOrderID := NewClientOrderID(o.ClientOrderID) - req.StopPrice(order.StopPriceString) + req := e.v3order.NewCreateWalletOrderRequest(walletType) + req.Market(toLocalSymbol(o.Symbol)). + Side(toLocalSideType(o.Side)). + Volume(quantityString). + OrderType(string(orderType)). + ClientOrderID(clientOrderID) + + switch o.Type { + case types.OrderTypeStopLimit, types.OrderTypeLimit, types.OrderTypeLimitMaker: + var priceInString string + if o.Market.Symbol != "" { + priceInString = o.Market.FormatPrice(o.Price) + } else { + priceInString = o.Price.String() + } + req.Price(priceInString) } - if len(order.PriceString) > 0 { - req.Price(order.PriceString) + // set stop price field for limit orders + switch o.Type { + case types.OrderTypeStopLimit, types.OrderTypeStopMarket: + var priceInString string + if o.Market.Symbol != "" { + priceInString = o.Market.FormatPrice(o.StopPrice) + } else { + priceInString = o.StopPrice.String() + } + req.StopPrice(priceInString) } retOrder, err := req.Do(ctx) if err != nil { return createdOrders, err } + if retOrder == nil { return createdOrders, errors.New("returned nil order") } @@ -280,36 +531,106 @@ func (e *Exchange) PlatformFeeCurrency() string { return toGlobalCurrency("max") } -func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { - userInfo, err := e.client.AccountService.Me() +func (e *Exchange) getLaunchDate() (time.Time, error) { + // MAX launch date June 21th, 2018 + loc, err := time.LoadLocation("Asia/Taipei") if err != nil { + return time.Time{}, err + } + + return time.Date(2018, time.June, 21, 0, 0, 0, 0, loc), nil +} + +func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { + if err := accountQueryLimiter.Wait(ctx); err != nil { return nil, err } - var balances = make(types.BalanceMap) - for _, a := range userInfo.Accounts { - balances[toGlobalCurrency(a.Currency)] = types.Balance{ - Currency: toGlobalCurrency(a.Currency), - Available: fixedpoint.Must(fixedpoint.NewFromString(a.Balance)), - Locked: fixedpoint.Must(fixedpoint.NewFromString(a.Locked)), - } + vipLevel, err := e.client.AccountService.NewGetVipLevelRequest().Do(ctx) + if err != nil { + return nil, err } + // MAX returns the fee rate in the following format: + // "maker_fee": 0.0005 -> 0.05% + // "taker_fee": 0.0015 -> 0.15% + a := &types.Account{ - MakerCommission: 15, // 0.15% - TakerCommission: 15, // 0.15% + AccountType: types.AccountTypeSpot, + MarginLevel: fixedpoint.Zero, + MakerFeeRate: fixedpoint.NewFromFloat(vipLevel.Current.MakerFee), // 0.15% = 0.0015 + TakerFeeRate: fixedpoint.NewFromFloat(vipLevel.Current.TakerFee), // 0.15% = 0.0015 } + balances, err := e.QueryAccountBalances(ctx) + if err != nil { + return nil, err + } a.UpdateBalances(balances) + + if e.MarginSettings.IsMargin { + a.AccountType = types.AccountTypeMargin + + req := 
e.v3margin.NewGetMarginADRatioRequest() + adRatio, err := req.Do(ctx) + if err != nil { + return a, err + } + + a.MarginLevel = adRatio.AdRatio + a.TotalAccountValue = adRatio.AssetInUsdt + } + return a, nil } +func (e *Exchange) QueryAccountBalances(ctx context.Context) (types.BalanceMap, error) { + if err := accountQueryLimiter.Wait(ctx); err != nil { + return nil, err + } + + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin + } + + req := e.v3order.NewGetWalletAccountsRequest(walletType) + accounts, err := req.Do(ctx) + if err != nil { + return nil, err + } + + var balances = make(types.BalanceMap) + for _, b := range accounts { + cur := toGlobalCurrency(b.Currency) + balances[cur] = types.Balance{ + Currency: cur, + Available: b.Balance, + Locked: b.Locked, + NetAsset: b.Balance.Add(b.Locked).Sub(b.Debt), + Borrowed: b.Debt, // TODO: Replace this with borrow in the newer version + Interest: b.Interest, + } + } + + return balances, nil +} + func (e *Exchange) QueryWithdrawHistory(ctx context.Context, asset string, since, until time.Time) (allWithdraws []types.Withdraw, err error) { startTime := since + limit := 1000 txIDs := map[string]struct{}{} + emptyTime := time.Time{} + if startTime == emptyTime { + startTime, err = e.getLaunchDate() + if err != nil { + return nil, err + } + } + for startTime.Before(until) { - // startTime ~ endTime must be in 90 days + // startTime ~ endTime must be in 60 days endTime := startTime.AddDate(0, 0, 60) if endTime.After(until) { endTime = until @@ -324,13 +645,20 @@ func (e *Exchange) QueryWithdrawHistory(ctx context.Context, asset string, since withdraws, err := req. From(startTime.Unix()). To(endTime.Unix()). + Limit(limit). Do(ctx) if err != nil { return allWithdraws, err } - for _, d := range withdraws { + if len(withdraws) == 0 { + startTime = endTime + continue + } + + for i := len(withdraws) - 1; i >= 0; i-- { + d := withdraws[i] if _, ok := txIDs[d.TxID]; ok { continue } @@ -355,21 +683,30 @@ func (e *Exchange) QueryWithdrawHistory(ctx context.Context, asset string, since } txIDs[d.TxID] = struct{}{} - allWithdraws = append(allWithdraws, types.Withdraw{ - ApplyTime: time.Unix(d.CreatedAt, 0), - Asset: toGlobalCurrency(d.Currency), - Amount: util.MustParseFloat(d.Amount), - Address: "", - AddressTag: "", - TransactionID: d.TxID, - TransactionFee: util.MustParseFloat(d.Fee), + withdraw := types.Withdraw{ + Exchange: types.ExchangeMax, + ApplyTime: types.Time(time.Unix(d.CreatedAt, 0)), + Asset: toGlobalCurrency(d.Currency), + Amount: d.Amount, + Address: "", + AddressTag: "", + TransactionID: d.TxID, + TransactionFee: d.Fee, + TransactionFeeCurrency: d.FeeCurrency, // WithdrawOrderID: d.WithdrawOrderID, // Network: d.Network, Status: status, - }) + } + allWithdraws = append(allWithdraws, withdraw) } - startTime = endTime + // go next time frame + if len(withdraws) < limit { + startTime = endTime + } else { + // its in descending order, so we get the first record + startTime = time.Unix(withdraws[0].CreatedAt, 0) + } } return allWithdraws, nil @@ -377,7 +714,17 @@ func (e *Exchange) QueryWithdrawHistory(ctx context.Context, asset string, since func (e *Exchange) QueryDepositHistory(ctx context.Context, asset string, since, until time.Time) (allDeposits []types.Deposit, err error) { startTime := since + limit := 1000 txIDs := map[string]struct{}{} + + emptyTime := time.Time{} + if startTime == emptyTime { + startTime, err = e.getLaunchDate() + if err != nil { + return nil, err + } + } + for 
startTime.Before(until) { // startTime ~ endTime must be in 90 days endTime := startTime.AddDate(0, 0, 60) @@ -386,6 +733,7 @@ func (e *Exchange) QueryDepositHistory(ctx context.Context, asset string, since, } log.Infof("querying deposit history %s: %s <=> %s", asset, startTime, endTime) + req := e.client.AccountService.NewGetDepositHistoryRequest() if len(asset) > 0 { req.Currency(toLocalCurrency(asset)) @@ -393,20 +741,24 @@ func (e *Exchange) QueryDepositHistory(ctx context.Context, asset string, since, deposits, err := req. From(startTime.Unix()). - To(endTime.Unix()).Do(ctx) + To(endTime.Unix()). + Limit(limit). + Do(ctx) if err != nil { return nil, err } - for _, d := range deposits { + for i := len(deposits) - 1; i >= 0; i-- { + d := deposits[i] if _, ok := txIDs[d.TxID]; ok { continue } allDeposits = append(allDeposits, types.Deposit{ - Time: time.Unix(d.CreatedAt, 0), - Amount: util.MustParseFloat(d.Amount), + Exchange: types.ExchangeMax, + Time: types.Time(time.Unix(d.CreatedAt, 0)), + Amount: d.Amount, Asset: toGlobalCurrency(d.Currency), Address: "", // not supported AddressTag: "", // not supported @@ -415,67 +767,121 @@ func (e *Exchange) QueryDepositHistory(ctx context.Context, asset string, since, }) } - startTime = endTime + if len(deposits) < limit { + startTime = endTime + } else { + startTime = time.Unix(deposits[0].CreatedAt, 0) + } } return allDeposits, err } -func (e *Exchange) QueryAccountBalances(ctx context.Context) (types.BalanceMap, error) { - accounts, err := e.client.AccountService.Accounts() - if err != nil { +func (e *Exchange) QueryTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) (trades []types.Trade, err error) { + if err := tradeQueryLimiter.Wait(ctx); err != nil { return nil, err } - var balances = make(types.BalanceMap) - - for _, a := range accounts { - balances[toGlobalCurrency(a.Currency)] = types.Balance{ - Currency: toGlobalCurrency(a.Currency), - Available: fixedpoint.Must(fixedpoint.NewFromString(a.Balance)), - Locked: fixedpoint.Must(fixedpoint.NewFromString(a.Locked)), - } + market := toLocalSymbol(symbol) + walletType := maxapi.WalletTypeSpot + if e.MarginSettings.IsMargin { + walletType = maxapi.WalletTypeMargin } - return balances, nil -} - -func (e *Exchange) QueryTrades(ctx context.Context, symbol string, options *types.TradeQueryOptions) (trades []types.Trade, err error) { - req := e.client.TradeService.NewPrivateTradeRequest() - req.Market(toLocalSymbol(symbol)) + req := e.v3order.NewGetWalletTradesRequest(walletType) + req.Market(market) if options.Limit > 0 { - req.Limit(options.Limit) + req.Limit(uint64(options.Limit)) + } else { + req.Limit(1000) } + // MAX uses exclusive last trade ID + // the timestamp parameter is used for reverse order, we can't use it. if options.LastTradeID > 0 { req.From(options.LastTradeID) } - // make it compatible with binance, we need the last trade id for the next page. 
- req.OrderBy("asc") - - remoteTrades, err := req.Do(ctx) + maxTrades, err := req.Do(ctx) if err != nil { return nil, err } - for _, t := range remoteTrades { + for _, t := range maxTrades { localTrade, err := toGlobalTrade(t) if err != nil { - logger.WithError(err).Errorf("can not convert trade: %+v", t) + log.WithError(err).Errorf("can not convert trade: %+v", t) continue } - logger.Infof("T: id=%d % 4s %s P=%f Q=%f %s", localTrade.ID, localTrade.Symbol, localTrade.Side, localTrade.Price, localTrade.Quantity, localTrade.Time) - trades = append(trades, *localTrade) } + // ensure everything is sorted ascending + trades = types.SortTradesAscending(trades) + return trades, nil } +func (e *Exchange) QueryRewards(ctx context.Context, startTime time.Time) ([]types.Reward, error) { + var from = startTime + var emptyTime = time.Time{} + + if from == emptyTime { + from = time.Unix(maxapi.TimestampSince, 0) + } + + var now = time.Now() + for { + if from.After(now) { + return nil, nil + } + + // scan by 30 days + // an user might get most 14 commission records by currency per day + // limit 1000 / 14 = 71 days + to := from.Add(time.Hour * 24 * 30) + req := e.client.RewardService.NewGetRewardsRequest() + req.From(from.Unix()) + req.To(to.Unix()) + req.Limit(1000) + + maxRewards, err := req.Do(ctx) + if err != nil { + return nil, err + } + + if len(maxRewards) == 0 { + // next page + from = to + continue + } + + rewards, err := toGlobalRewards(maxRewards) + if err != nil { + return nil, err + } + + // sort them in the ascending order + sort.Sort(types.RewardSliceByCreationTime(rewards)) + return rewards, nil + } + + return nil, errors.New("unknown error") +} + +// QueryKLines returns the klines from the MAX exchange API. +// The KLine API of the MAX exchange uses inclusive time range +// +// https://max-api.maicoin.com/api/v2/k?market=btctwd&limit=10&period=1×tamp=1620202440 +// The above query will return a kline that starts with 1620202440 (unix timestamp) without endTime. +// We need to calculate the endTime by ourself. 
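+// For example, if only EndTime is given, the code below rewrites the query as
+// StartTime = EndTime - limit*interval (limit defaults to 5000 in this function),
+// fetches from StartTime, and drops any kline whose start time falls after EndTime.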
func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { + if err := marketDataLimiter.Wait(ctx); err != nil { + return nil, err + } + var limit = 5000 if options.Limit > 0 { // default limit == 500 @@ -485,7 +891,7 @@ func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval type // workaround for the kline query, because MAX does not support query by end time // so we need to use the given end time and the limit number to calculate the start time if options.EndTime != nil && options.StartTime == nil { - startTime := options.EndTime.Add(- time.Duration(limit) * interval.Duration()) + startTime := options.EndTime.Add(-time.Duration(limit) * interval.Duration()) options.StartTime = &startTime } @@ -494,10 +900,6 @@ func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval type } log.Infof("querying kline %s %s %+v", symbol, interval, options) - - // avoid rate limit - time.Sleep(100 * time.Millisecond) - localKLines, err := e.client.PublicService.KLines(toLocalSymbol(symbol), string(interval), *options.StartTime, limit) if err != nil { return nil, err @@ -505,17 +907,75 @@ func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval type var kLines []types.KLine for _, k := range localKLines { + if options.EndTime != nil && k.StartTime.After(*options.EndTime) { + break + } + kLines = append(kLines, k.KLine()) } return kLines, nil } -func (e *Exchange) QueryAveragePrice(ctx context.Context, symbol string) (float64, error) { +var Two = fixedpoint.NewFromInt(2) + +func (e *Exchange) QueryAveragePrice(ctx context.Context, symbol string) (fixedpoint.Value, error) { ticker, err := e.client.PublicService.Ticker(toLocalSymbol(symbol)) if err != nil { - return 0, err + return fixedpoint.Zero, err + } + + return fixedpoint.MustNewFromString(ticker.Sell). 
+ Add(fixedpoint.MustNewFromString(ticker.Buy)).Div(Two), nil +} + +func (e *Exchange) RepayMarginAsset(ctx context.Context, asset string, amount fixedpoint.Value) error { + req := e.v3margin.NewMarginRepayRequest() + req.Currency(toLocalCurrency(asset)) + req.Amount(amount.String()) + resp, err := req.Do(ctx) + if err != nil { + return err + } + + log.Infof("margin repay: %v", resp) + return nil +} + +func (e *Exchange) BorrowMarginAsset(ctx context.Context, asset string, amount fixedpoint.Value) error { + req := e.v3margin.NewMarginLoanRequest() + req.Currency(toLocalCurrency(asset)) + req.Amount(amount.String()) + resp, err := req.Do(ctx) + if err != nil { + return err + } + + log.Infof("margin borrow: %v", resp) + return nil +} + +func (e *Exchange) QueryMarginAssetMaxBorrowable(ctx context.Context, asset string) (amount fixedpoint.Value, err error) { + req := e.v3margin.NewGetMarginBorrowingLimitsRequest() + resp, err := req.Do(ctx) + if err != nil { + return fixedpoint.Zero, err + } + + limits := *resp + if limit, ok := limits[toLocalCurrency(asset)]; ok { + return limit, nil } - return (util.MustParseFloat(ticker.Sell) + util.MustParseFloat(ticker.Buy)) / 2, nil + err = fmt.Errorf("borrowing limit of %s not found", asset) + return amount, err +} + +// DefaultFeeRates returns the MAX VIP 0 fee schedule +// See also https://max-vip-zh.maicoin.com/ +func (e *Exchange) DefaultFeeRates() types.ExchangeFee { + return types.ExchangeFee{ + MakerFeeRate: fixedpoint.NewFromFloat(0.01 * 0.045), // 0.045% + TakerFeeRate: fixedpoint.NewFromFloat(0.01 * 0.150), // 0.15% + } } diff --git a/pkg/exchange/max/maxapi/account.go b/pkg/exchange/max/maxapi/account.go index 2a43f2fbb5..bdefd4c60d 100644 --- a/pkg/exchange/max/maxapi/account.go +++ b/pkg/exchange/max/maxapi/account.go @@ -1,6 +1,14 @@ package max -import "context" +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +import ( + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) type AccountService struct { client *RestClient @@ -8,18 +16,18 @@ type AccountService struct { // Account is for max rest api v2, Balance and Type will be conflict with types.PrivateBalanceUpdate type Account struct { - Currency string `json:"currency"` - Balance string `json:"balance"` - Locked string `json:"locked"` - Type string `json:"type"` -} + Type string `json:"type"` + Currency string `json:"currency"` + Balance fixedpoint.Value `json:"balance"` + Locked fixedpoint.Value `json:"locked"` + + // v3 fields for M wallet + Debt fixedpoint.Value `json:"debt"` + Interest fixedpoint.Value `json:"interest"` -// Balance is for kingfisher -type Balance struct { - Currency string - Available int64 - Locked int64 - Total int64 + // v2 fields + FiatCurrency string `json:"fiat_currency"` + FiatBalance fixedpoint.Value `json:"fiat_balance"` } type UserBank struct { @@ -34,6 +42,7 @@ type UserInfo struct { Name string `json:"name"` Type string `json:"member_type"` Level int `json:"level"` + VipLevel int `json:"vip_level"` Email string `json:"email"` Accounts []Account `json:"accounts"` Bank *UserBank `json:"bank,omitempty"` @@ -50,135 +59,69 @@ type UserInfo struct { ReferralCode string `json:"referral_code"` } -func (s *AccountService) Account(currency string) (*Account, error) { - req, err := s.client.newAuthenticatedRequest("GET", "v2/members/accounts/"+currency, nil) - if err != nil { - return nil, err - } - - response, 
err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var account Account - err = response.DecodeJSON(&account) - if err != nil { - return nil, err - } - - return &account, nil +type VipLevelSettings struct { + Level int `json:"level"` + MinimumTradingVolume float64 `json:"minimum_trading_volume"` + MinimumStakingVolume float64 `json:"minimum_staking_volume"` + MakerFee float64 `json:"maker_fee"` + TakerFee float64 `json:"taker_fee"` } -func (s *AccountService) Accounts() ([]Account, error) { - req, err := s.client.newAuthenticatedRequest("GET", "v2/members/accounts", nil) - if err != nil { - return nil, err - } - - response, err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var accounts []Account - err = response.DecodeJSON(&accounts) - if err != nil { - return nil, err - } - - return accounts, nil +type VipLevel struct { + Current VipLevelSettings `json:"current_vip_level"` + Next VipLevelSettings `json:"next_vip_level"` } -// Me returns the current user info by the current used MAX key and secret -func (s *AccountService) Me() (*UserInfo, error) { - req, err := s.client.newAuthenticatedRequest("GET", "v2/members/me", nil) - if err != nil { - return nil, err - } - - response, err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var m = UserInfo{} - err = response.DecodeJSON(&m) - if err != nil { - return nil, err - } - - return &m, nil +//go:generate GetRequest -url "v2/members/vip_level" -type GetVipLevelRequest -responseType .VipLevel +type GetVipLevelRequest struct { + client requestgen.AuthenticatedAPIClient } -type Deposit struct { - Currency string `json:"currency"` - CurrencyVersion string `json:"currency_version"` // "eth" - Amount string `json:"amount"` - Fee string `json:"fee"` - TxID string `json:"txid"` - State string `json:"state"` - Confirmations string `json:"confirmations"` - CreatedAt int64 `json:"created_at"` - UpdatedAt int64 `json:"updated_at"` +func (s *AccountService) NewGetVipLevelRequest() *GetVipLevelRequest { + return &GetVipLevelRequest{client: s.client} } -type GetDepositHistoryRequestParams struct { - *PrivateRequestParams - - Currency string `json:"currency,omitempty"` - From int64 `json:"from,omitempty"` // seconds - To int64 `json:"to,omitempty"` // seconds - State string `json:"state,omitempty"` // submitting, submitted, rejected, accepted, checking, refunded, canceled, suspect - Limit int `json:"limit,omitempty"` -} - -type GetDepositHistoryRequest struct { - client *RestClient - params GetDepositHistoryRequestParams -} - -func (r *GetDepositHistoryRequest) State(state string) *GetDepositHistoryRequest { - r.params.State = state - return r -} +//go:generate GetRequest -url "v2/members/accounts/:currency" -type GetAccountRequest -responseType .Account +type GetAccountRequest struct { + client requestgen.AuthenticatedAPIClient -func (r *GetDepositHistoryRequest) Currency(currency string) *GetDepositHistoryRequest { - r.params.Currency = currency - return r + currency string `param:"currency,slug"` } -func (r *GetDepositHistoryRequest) Limit(limit int) *GetDepositHistoryRequest { - r.params.Limit = limit - return r +func (s *AccountService) NewGetAccountRequest() *GetAccountRequest { + return &GetAccountRequest{client: s.client} } -func (r *GetDepositHistoryRequest) From(from int64) *GetDepositHistoryRequest { - r.params.From = from - return r +//go:generate GetRequest -url "v2/members/accounts" -type GetAccountsRequest -responseType []Account +type GetAccountsRequest struct { + 
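+	// client is the authenticated API client; the parameter handling and the Do()
+	// method for this request type are generated by requestgen from the
+	// go:generate directive above (see the *_requestgen.go files in this change).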
client requestgen.AuthenticatedAPIClient } -func (r *GetDepositHistoryRequest) To(to int64) *GetDepositHistoryRequest { - r.params.To = to - return r +func (s *AccountService) NewGetAccountsRequest() *GetAccountsRequest { + return &GetAccountsRequest{client: s.client} } -func (r *GetDepositHistoryRequest) Do(ctx context.Context) (deposits []Deposit, err error) { - req, err := r.client.newAuthenticatedRequest("GET", "v2/deposits", &r.params) - if err != nil { - return deposits, err - } - - response, err := r.client.sendRequest(req) - if err != nil { - return deposits, err - } - - if err := response.DecodeJSON(&deposits); err != nil { - return deposits, err - } +type Deposit struct { + Currency string `json:"currency"` + CurrencyVersion string `json:"currency_version"` // "eth" + Amount fixedpoint.Value `json:"amount"` + Fee fixedpoint.Value `json:"fee"` + TxID string `json:"txid"` + State string `json:"state"` + Confirmations int64 `json:"confirmations"` + CreatedAt int64 `json:"created_at"` + UpdatedAt int64 `json:"updated_at"` +} + +//go:generate GetRequest -url "v2/deposits" -type GetDepositHistoryRequest -responseType []Deposit +type GetDepositHistoryRequest struct { + client requestgen.AuthenticatedAPIClient - return deposits, err + currency *string `param:"currency"` + from *int64 `param:"from"` // seconds + to *int64 `param:"to"` // seconds + state *string `param:"state"` // submitting, submitted, rejected, accepted, checking, refunded, canceled, suspect + limit *int `param:"limit"` } func (s *AccountService) NewGetDepositHistoryRequest() *GetDepositHistoryRequest { @@ -187,15 +130,14 @@ func (s *AccountService) NewGetDepositHistoryRequest() *GetDepositHistoryRequest } } - - type Withdraw struct { - UUID string `json:"uuid"` - Currency string `json:"currency"` - CurrencyVersion string `json:"currency_version"` // "eth" - Amount string `json:"amount"` - Fee string `json:"fee"` - TxID string `json:"txid"` + UUID string `json:"uuid"` + Currency string `json:"currency"` + CurrencyVersion string `json:"currency_version"` // "eth" + Amount fixedpoint.Value `json:"amount"` + Fee fixedpoint.Value `json:"fee"` + FeeCurrency string `json:"fee_currency"` + TxID string `json:"txid"` // State can be "submitting", "submitted", // "rejected", "accepted", "suspect", "approved", "delisted_processing", @@ -203,68 +145,22 @@ type Withdraw struct { // "failed", "pending", "confirmed", // "kgi_manually_processing", "kgi_manually_confirmed", "kgi_possible_failed", // "sygna_verifying" - State string `json:"state"` - Confirmations int `json:"confirmations"` - CreatedAt int64 `json:"created_at"` - UpdatedAt int64 `json:"updated_at"` -} - -type GetWithdrawHistoryRequestParams struct { - *PrivateRequestParams - - Currency string `json:"currency,omitempty"` - From int64 `json:"from,omitempty"` // seconds - To int64 `json:"to,omitempty"` // seconds - State string `json:"state,omitempty"` // submitting, submitted, rejected, accepted, checking, refunded, canceled, suspect - Limit int `json:"limit,omitempty"` + State string `json:"state"` + Confirmations int `json:"confirmations"` + CreatedAt int64 `json:"created_at"` + UpdatedAt int64 `json:"updated_at"` + Notes string `json:"notes"` } +//go:generate GetRequest -url "v2/withdrawals" -type GetWithdrawHistoryRequest -responseType []Withdraw type GetWithdrawHistoryRequest struct { - client *RestClient - params GetWithdrawHistoryRequestParams -} - -func (r *GetWithdrawHistoryRequest) State(state string) *GetWithdrawHistoryRequest { - r.params.State = state - return r 
-} - -func (r *GetWithdrawHistoryRequest) Currency(currency string) *GetWithdrawHistoryRequest { - r.params.Currency = currency - return r -} - -func (r *GetWithdrawHistoryRequest) Limit(limit int) *GetWithdrawHistoryRequest { - r.params.Limit = limit - return r -} - -func (r *GetWithdrawHistoryRequest) From(from int64) *GetWithdrawHistoryRequest { - r.params.From = from - return r -} - -func (r *GetWithdrawHistoryRequest) To(to int64) *GetWithdrawHistoryRequest { - r.params.To = to - return r -} - -func (r *GetWithdrawHistoryRequest) Do(ctx context.Context) (withdraws []Withdraw, err error) { - req, err := r.client.newAuthenticatedRequest("GET", "v2/withdrawals", &r.params) - if err != nil { - return withdraws, err - } - - response, err := r.client.sendRequest(req) - if err != nil { - return withdraws, err - } - - if err := response.DecodeJSON(&withdraws); err != nil { - return withdraws, err - } + client requestgen.AuthenticatedAPIClient - return withdraws, err + currency string `param:"currency"` + from *int64 `param:"from"` // seconds + to *int64 `param:"to"` // seconds + state *string `param:"state"` // submitting, submitted, rejected, accepted, checking, refunded, canceled, suspect + limit *int `param:"limit"` } func (s *AccountService) NewGetWithdrawalHistoryRequest() *GetWithdrawHistoryRequest { diff --git a/pkg/exchange/max/maxapi/account_test.go b/pkg/exchange/max/maxapi/account_test.go new file mode 100644 index 0000000000..e082586be4 --- /dev/null +++ b/pkg/exchange/max/maxapi/account_test.go @@ -0,0 +1,112 @@ +package max + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestAccountService_GetAccountsRequest(t *testing.T) { + key, secret, ok := integrationTestConfigured(t, "MAX") + if !ok { + t.SkipNow() + } + + ctx := context.Background() + + client := NewRestClient(ProductionAPIURL) + client.Auth(key, secret) + + req := client.AccountService.NewGetAccountsRequest() + accounts, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, accounts) + assert.NotEmpty(t, accounts) + + t.Logf("accounts: %+v", accounts) +} + +func TestAccountService_GetAccountRequest(t *testing.T) { + key, secret, ok := integrationTestConfigured(t, "MAX") + if !ok { + t.SkipNow() + } + + ctx := context.Background() + + client := NewRestClient(ProductionAPIURL) + client.Auth(key, secret) + + req := client.AccountService.NewGetAccountRequest() + req.Currency("twd") + account, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, account) + t.Logf("account: %+v", account) + + req2 := client.AccountService.NewGetAccountRequest() + req2.Currency("usdt") + account, err = req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, account) + t.Logf("account: %+v", account) +} + +func TestAccountService_GetVipLevelRequest(t *testing.T) { + key, secret, ok := integrationTestConfigured(t, "MAX") + if !ok { + t.SkipNow() + } + + ctx := context.Background() + + client := NewRestClient(ProductionAPIURL) + client.Auth(key, secret) + + req := client.AccountService.NewGetVipLevelRequest() + vipLevel, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, vipLevel) + t.Logf("vipLevel: %+v", vipLevel) +} + +func TestAccountService_GetWithdrawHistoryRequest(t *testing.T) { + key, secret, ok := integrationTestConfigured(t, "MAX") + if !ok { + t.SkipNow() + } + + ctx := context.Background() + + client := NewRestClient(ProductionAPIURL) + client.Auth(key, secret) + + req := client.AccountService.NewGetWithdrawalHistoryRequest() + req.Currency("usdt") + withdraws, err 
:= req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, withdraws) + assert.NotEmpty(t, withdraws) + t.Logf("withdraws: %+v", withdraws) +} + +func TestAccountService_NewGetDepositHistoryRequest(t *testing.T) { + key, secret, ok := integrationTestConfigured(t, "MAX") + if !ok { + t.SkipNow() + } + + ctx := context.Background() + + client := NewRestClient(ProductionAPIURL) + client.Auth(key, secret) + + req := client.AccountService.NewGetDepositHistoryRequest() + req.Currency("usdt") + deposits, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, deposits) + assert.NotEmpty(t, deposits) + t.Logf("deposits: %+v", deposits) +} diff --git a/pkg/exchange/max/maxapi/auth.go b/pkg/exchange/max/maxapi/auth.go index c353c56dd0..15629dca89 100644 --- a/pkg/exchange/max/maxapi/auth.go +++ b/pkg/exchange/max/maxapi/auth.go @@ -1,11 +1,12 @@ package max type AuthMessage struct { - Action string `json:"action"` - APIKey string `json:"apiKey"` - Nonce int64 `json:"nonce"` - Signature string `json:"signature"` - ID string `json:"id"` + Action string `json:"action,omitempty"` + APIKey string `json:"apiKey,omitempty"` + Nonce int64 `json:"nonce,omitempty"` + Signature string `json:"signature,omitempty"` + ID string `json:"id,omitempty"` + Filters []string `json:"filters,omitempty"` } type AuthEvent struct { diff --git a/pkg/exchange/max/maxapi/get_account_request_requestgen.go b/pkg/exchange/max/maxapi/get_account_request_requestgen.go new file mode 100644 index 0000000000..4d71c47092 --- /dev/null +++ b/pkg/exchange/max/maxapi/get_account_request_requestgen.go @@ -0,0 +1,151 @@ +// Code generated by "requestgen -method GET -url v2/members/accounts/:currency -type GetAccountRequest -responseType .Account"; DO NOT EDIT. + +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (g *GetAccountRequest) Currency(currency string) *GetAccountRequest { + g.currency = currency + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetAccountRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetAccountRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetAccountRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if g.isVarSlice(v) { + g.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetAccountRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetAccountRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + 
currency := g.currency + + // assign parameter of currency + params["currency"] = currency + + return params, nil +} + +func (g *GetAccountRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetAccountRequest) iterateSlice(slice interface{}, f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (g *GetAccountRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetAccountRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetAccountRequest) Do(ctx context.Context) (*Account, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "v2/members/accounts/:currency" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Account + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/get_accounts_request_requestgen.go b/pkg/exchange/max/maxapi/get_accounts_request_requestgen.go new file mode 100644 index 0000000000..b475ca8607 --- /dev/null +++ b/pkg/exchange/max/maxapi/get_accounts_request_requestgen.go @@ -0,0 +1,135 @@ +// Code generated by "requestgen -method GET -url v2/members/accounts -type GetAccountsRequest -responseType []Account"; DO NOT EDIT. 
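// NOTE (illustrative comment, not part of the generated output): every requestgen-generated
// request in this package follows the same pattern — the service constructor returns the
// request object, optional fluent setters fill in parameters, and Do(ctx) signs the request,
// sends it, and decodes the declared response type. A minimal sketch, assuming an
// authenticated RestClient as set up in the tests above:
//
//	req := client.AccountService.NewGetAccountsRequest()
//	accounts, err := req.Do(ctx) // GET v2/members/accounts -> []Account
//
//	acc, err := client.AccountService.NewGetAccountRequest().Currency("twd").Do(ctx) // GET v2/members/accounts/:currency -> *Account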
+ +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetAccountsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetAccountsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetAccountsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if g.isVarSlice(v) { + g.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetAccountsRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetAccountsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetAccountsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetAccountsRequest) iterateSlice(slice interface{}, f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (g *GetAccountsRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetAccountsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetAccountsRequest) Do(ctx context.Context) ([]Account, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "v2/members/accounts" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []Account + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/get_deposit_history_request_requestgen.go b/pkg/exchange/max/maxapi/get_deposit_history_request_requestgen.go new file mode 100644 index 0000000000..444272a554 --- /dev/null +++ b/pkg/exchange/max/maxapi/get_deposit_history_request_requestgen.go @@ -0,0 +1,203 @@ +// Code generated by "requestgen -method GET -url v2/deposits -type GetDepositHistoryRequest -responseType []Deposit"; DO NOT 
EDIT. + +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (g *GetDepositHistoryRequest) Currency(currency string) *GetDepositHistoryRequest { + g.currency = ¤cy + return g +} + +func (g *GetDepositHistoryRequest) From(from int64) *GetDepositHistoryRequest { + g.from = &from + return g +} + +func (g *GetDepositHistoryRequest) To(to int64) *GetDepositHistoryRequest { + g.to = &to + return g +} + +func (g *GetDepositHistoryRequest) State(state string) *GetDepositHistoryRequest { + g.state = &state + return g +} + +func (g *GetDepositHistoryRequest) Limit(limit int) *GetDepositHistoryRequest { + g.limit = &limit + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetDepositHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetDepositHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + if g.currency != nil { + currency := *g.currency + + // assign parameter of currency + params["currency"] = currency + } else { + } + // check from field -> json key from + if g.from != nil { + from := *g.from + + // assign parameter of from + params["from"] = from + } else { + } + // check to field -> json key to + if g.to != nil { + to := *g.to + + // assign parameter of to + params["to"] = to + } else { + } + // check state field -> json key state + if g.state != nil { + state := *g.state + + // assign parameter of state + params["state"] = state + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetDepositHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if g.isVarSlice(v) { + g.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetDepositHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetDepositHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetDepositHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetDepositHistoryRequest) iterateSlice(slice interface{}, f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (g 
*GetDepositHistoryRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetDepositHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetDepositHistoryRequest) Do(ctx context.Context) ([]Deposit, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "v2/deposits" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []Deposit + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/get_rewards_of_type_request_requestgen.go b/pkg/exchange/max/maxapi/get_rewards_of_type_request_requestgen.go new file mode 100644 index 0000000000..225d48aa49 --- /dev/null +++ b/pkg/exchange/max/maxapi/get_rewards_of_type_request_requestgen.go @@ -0,0 +1,222 @@ +// Code generated by "requestgen -method GET -url v2/rewards/:path_type -type GetRewardsOfTypeRequest -responseType []Reward"; DO NOT EDIT. + +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (g *GetRewardsOfTypeRequest) From(from int64) *GetRewardsOfTypeRequest { + g.from = &from + return g +} + +func (g *GetRewardsOfTypeRequest) To(to int64) *GetRewardsOfTypeRequest { + g.to = &to + return g +} + +func (g *GetRewardsOfTypeRequest) Page(page int64) *GetRewardsOfTypeRequest { + g.page = &page + return g +} + +func (g *GetRewardsOfTypeRequest) Limit(limit int64) *GetRewardsOfTypeRequest { + g.limit = &limit + return g +} + +func (g *GetRewardsOfTypeRequest) Offset(offset int64) *GetRewardsOfTypeRequest { + g.offset = &offset + return g +} + +func (g *GetRewardsOfTypeRequest) PathType(pathType RewardType) *GetRewardsOfTypeRequest { + g.pathType = &pathType + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetRewardsOfTypeRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetRewardsOfTypeRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check from field -> json key from + if g.from != nil { + from := *g.from + + // assign parameter of from + params["from"] = from + } else { + } + // check to field -> json key to + if g.to != nil { + to := *g.to + + // assign parameter of to + params["to"] = to + } else { + } + // check page field -> json key page + if g.page != nil { + page := *g.page + + // assign parameter of page + params["page"] = page + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + // check offset field -> json key offset + if g.offset != nil { + offset := *g.offset + + // assign parameter of offset + 
params["offset"] = offset + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetRewardsOfTypeRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if g.isVarSlice(v) { + g.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetRewardsOfTypeRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetRewardsOfTypeRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check pathType field -> json key path_type + if g.pathType != nil { + pathType := *g.pathType + + // assign parameter of pathType + params["path_type"] = pathType + + } + + return params, nil +} + +func (g *GetRewardsOfTypeRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetRewardsOfTypeRequest) iterateSlice(slice interface{}, f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (g *GetRewardsOfTypeRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetRewardsOfTypeRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetRewardsOfTypeRequest) Do(ctx context.Context) ([]Reward, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "v2/rewards/:path_type" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []Reward + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/get_rewards_request_requestgen.go b/pkg/exchange/max/maxapi/get_rewards_request_requestgen.go new file mode 100644 index 0000000000..e21f1b5295 --- /dev/null +++ b/pkg/exchange/max/maxapi/get_rewards_request_requestgen.go @@ -0,0 +1,216 @@ +// Code generated by "requestgen -method GET -url v2/rewards -type GetRewardsRequest -responseType []Reward"; DO NOT EDIT. 
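// NOTE (illustrative comment, not generated code): GetRewardsRequest below queries v2/rewards
// with optional filters, while the GetRewardsOfTypeRequest generated above targets
// v2/rewards/:path_type — its PathType() value is written into GetSlugParameters() and
// applySlugsToUrl() substitutes the :path_type placeholder before the request is sent.
// A minimal sketch, assuming a request obtained from its service constructor (the constructor
// name is not shown in this diff) and a RewardType value:
//
//	rewards, err := req.PathType(rewardType).Limit(50).Do(ctx) // GET v2/rewards/{type} -> []Reward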
+ +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (g *GetRewardsRequest) Currency(currency string) *GetRewardsRequest { + g.currency = ¤cy + return g +} + +func (g *GetRewardsRequest) From(from int64) *GetRewardsRequest { + g.from = &from + return g +} + +func (g *GetRewardsRequest) To(to int64) *GetRewardsRequest { + g.to = &to + return g +} + +func (g *GetRewardsRequest) Page(page int64) *GetRewardsRequest { + g.page = &page + return g +} + +func (g *GetRewardsRequest) Limit(limit int64) *GetRewardsRequest { + g.limit = &limit + return g +} + +func (g *GetRewardsRequest) Offset(offset int64) *GetRewardsRequest { + g.offset = &offset + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetRewardsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetRewardsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + if g.currency != nil { + currency := *g.currency + + // assign parameter of currency + params["currency"] = currency + } else { + } + // check from field -> json key from + if g.from != nil { + from := *g.from + + // assign parameter of from + params["from"] = from + } else { + } + // check to field -> json key to + if g.to != nil { + to := *g.to + + // assign parameter of to + params["to"] = to + } else { + } + // check page field -> json key page + if g.page != nil { + page := *g.page + + // assign parameter of page + params["page"] = page + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + // check offset field -> json key offset + if g.offset != nil { + offset := *g.offset + + // assign parameter of offset + params["offset"] = offset + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetRewardsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if g.isVarSlice(v) { + g.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetRewardsRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetRewardsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetRewardsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetRewardsRequest) iterateSlice(slice interface{}, f func(it interface{})) { + sliceValue 
:= reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (g *GetRewardsRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetRewardsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetRewardsRequest) Do(ctx context.Context) ([]Reward, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "v2/rewards" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []Reward + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/get_vip_level_request_requestgen.go b/pkg/exchange/max/maxapi/get_vip_level_request_requestgen.go new file mode 100644 index 0000000000..e66465f805 --- /dev/null +++ b/pkg/exchange/max/maxapi/get_vip_level_request_requestgen.go @@ -0,0 +1,135 @@ +// Code generated by "requestgen -method GET -url v2/members/vip_level -type GetVipLevelRequest -responseType .VipLevel"; DO NOT EDIT. + +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetVipLevelRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetVipLevelRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetVipLevelRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if g.isVarSlice(v) { + g.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetVipLevelRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetVipLevelRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetVipLevelRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetVipLevelRequest) iterateSlice(slice 
interface{}, f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (g *GetVipLevelRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetVipLevelRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetVipLevelRequest) Do(ctx context.Context) (*VipLevel, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "v2/members/vip_level" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse VipLevel + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/get_withdraw_history_request_requestgen.go b/pkg/exchange/max/maxapi/get_withdraw_history_request_requestgen.go new file mode 100644 index 0000000000..3f66dbd43e --- /dev/null +++ b/pkg/exchange/max/maxapi/get_withdraw_history_request_requestgen.go @@ -0,0 +1,200 @@ +// Code generated by "requestgen -method GET -url v2/withdrawals -type GetWithdrawHistoryRequest -responseType []Withdraw"; DO NOT EDIT. + +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (g *GetWithdrawHistoryRequest) Currency(currency string) *GetWithdrawHistoryRequest { + g.currency = currency + return g +} + +func (g *GetWithdrawHistoryRequest) From(from int64) *GetWithdrawHistoryRequest { + g.from = &from + return g +} + +func (g *GetWithdrawHistoryRequest) To(to int64) *GetWithdrawHistoryRequest { + g.to = &to + return g +} + +func (g *GetWithdrawHistoryRequest) State(state string) *GetWithdrawHistoryRequest { + g.state = &state + return g +} + +func (g *GetWithdrawHistoryRequest) Limit(limit int) *GetWithdrawHistoryRequest { + g.limit = &limit + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetWithdrawHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetWithdrawHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + currency := g.currency + + // assign parameter of currency + params["currency"] = currency + // check from field -> json key from + if g.from != nil { + from := *g.from + + // assign parameter of from + params["from"] = from + } else { + } + // check to field -> json key to + if g.to != nil { + to := *g.to + + // assign parameter of to + params["to"] = to + } else { + } + // check state field -> json key state + if g.state != nil { + state := *g.state + + // assign parameter of state + params["state"] = state + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + 
params["limit"] = limit + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetWithdrawHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if g.isVarSlice(v) { + g.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetWithdrawHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetWithdrawHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetWithdrawHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetWithdrawHistoryRequest) iterateSlice(slice interface{}, f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (g *GetWithdrawHistoryRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetWithdrawHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetWithdrawHistoryRequest) Do(ctx context.Context) ([]Withdraw, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "v2/withdrawals" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []Withdraw + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/get_withdrawal_addresses_request_requestgen.go b/pkg/exchange/max/maxapi/get_withdrawal_addresses_request_requestgen.go new file mode 100644 index 0000000000..277e5d5ef0 --- /dev/null +++ b/pkg/exchange/max/maxapi/get_withdrawal_addresses_request_requestgen.go @@ -0,0 +1,154 @@ +// Code generated by "requestgen -method GET -url v2/withdraw_addresses -type GetWithdrawalAddressesRequest -responseType []WithdrawalAddress"; DO NOT EDIT. 
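// NOTE (illustrative comment, not part of the generated output): unlike the pointer-typed
// optional fields used by the history requests above, the currency field of
// GetWithdrawalAddressesRequest below is a plain string marked required — GetParameters()
// returns an error when it is empty, so Currency() must be set before calling Do(ctx).
// A minimal sketch, assuming an authenticated client and a service constructor analogous
// to NewGetWithdrawalHistoryRequest (the exact constructor name is not shown in this hunk):
//
//	addrs, err := req.Currency("usdt").Do(ctx) // GET v2/withdraw_addresses -> []WithdrawalAddress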
+ +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (g *GetWithdrawalAddressesRequest) Currency(currency string) *GetWithdrawalAddressesRequest { + g.currency = currency + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetWithdrawalAddressesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetWithdrawalAddressesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + currency := g.currency + + // TEMPLATE check-required + if len(currency) == 0 { + return nil, fmt.Errorf("currency is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of currency + params["currency"] = currency + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetWithdrawalAddressesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if g.isVarSlice(v) { + g.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetWithdrawalAddressesRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetWithdrawalAddressesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetWithdrawalAddressesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (g *GetWithdrawalAddressesRequest) iterateSlice(slice interface{}, f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (g *GetWithdrawalAddressesRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetWithdrawalAddressesRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (g *GetWithdrawalAddressesRequest) Do(ctx context.Context) ([]WithdrawalAddress, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "v2/withdraw_addresses" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := 
g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []WithdrawalAddress + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/order.go b/pkg/exchange/max/maxapi/order.go index 1d6a8f4bc2..004b0737e6 100644 --- a/pkg/exchange/max/maxapi/order.go +++ b/pkg/exchange/max/maxapi/order.go @@ -1,11 +1,18 @@ package max +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST + import ( - "context" - "strconv" - "time" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type WalletType string - "github.com/pkg/errors" +const ( + WalletTypeSpot WalletType = "spot" + WalletTypeMargin WalletType = "m" ) type OrderStateToQuery int @@ -19,7 +26,8 @@ const ( type OrderState string const ( - OrderStateDone = OrderState("done") + OrderStateDone = OrderState("done") + OrderStateCancel = OrderState("cancel") OrderStateWait = OrderState("wait") OrderStateConvert = OrderState("convert") @@ -33,14 +41,18 @@ type OrderType string const ( OrderTypeMarket = OrderType("market") OrderTypeLimit = OrderType("limit") + OrderTypePostOnly = OrderType("post_only") OrderTypeStopLimit = OrderType("stop_limit") OrderTypeStopMarket = OrderType("stop_market") + OrderTypeIOCLimit = OrderType("ioc_limit") ) type QueryOrderOptions struct { GroupID int Offset int Limit int + Page int + OrderBy string } // OrderService manages the Order endpoint. @@ -48,433 +60,33 @@ type OrderService struct { client *RestClient } -// Order represents one returned order (POST order/GET order/GET orders) on the max platform. -type Order struct { - ID uint64 `json:"id,omitempty" db:"exchange_id"` - Side string `json:"side" db:"side"` - OrderType OrderType `json:"ord_type,omitempty" db:"order_type"` - Price string `json:"price" db:"price"` - AveragePrice string `json:"avg_price,omitempty" db:"average_price"` - State OrderState `json:"state,omitempty" db:"state"` - Market string `json:"market,omitempty" db:"market"` - Volume string `json:"volume" db:"volume"` - RemainingVolume string `json:"remaining_volume,omitempty" db:"remaining_volume"` - ExecutedVolume string `json:"executed_volume,omitempty" db:"executed_volume"` - TradesCount int64 `json:"trades_count,omitempty" db:"trades_count"` - GroupID int64 `json:"group_id,omitempty" db:"group_id"` - ClientOID string `json:"client_oid,omitempty" db:"client_oid"` - CreatedAt time.Time `json:"-" db:"created_at"` - CreatedAtMs int64 `json:"created_at_in_ms,omitempty"` - InsertedAt time.Time `json:"-" db:"inserted_at"` -} - -// Open returns open orders -func (s *OrderService) Closed(market string, options QueryOrderOptions) ([]Order, error) { - payload := map[string]interface{}{ - "market": market, - "state": []OrderState{OrderStateFinalizing, OrderStateDone, OrderStateCancel, OrderStateFailed}, - "order_by": "desc", - "pagination": false, - } - - if options.GroupID > 0 { - payload["group_id"] = options.GroupID - } - if options.Offset > 0 { - payload["offset"] = options.Offset - } - if options.Limit > 0 { - payload["limit"] = options.Limit - } - - req, err := s.client.newAuthenticatedRequest("GET", "v2/orders", payload) - if err != nil { - return nil, err - } - - response, err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var orders []Order - if err := response.DecodeJSON(&orders); err != nil { - return nil, err - } - - return orders, nil -} - -// Open returns open 
orders -func (s *OrderService) Open(market string, options QueryOrderOptions) ([]Order, error) { - payload := map[string]interface{}{ - "market": market, - // "state": []OrderState{OrderStateWait, OrderStateConvert}, - "order_by": "desc", - "pagination": false, - } - - if options.GroupID > 0 { - payload["group_id"] = options.GroupID - } - - req, err := s.client.newAuthenticatedRequest("GET", "v2/orders", payload) - if err != nil { - return nil, err - } - - response, err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var orders []Order - if err := response.DecodeJSON(&orders); err != nil { - return nil, err - } - - return orders, nil -} - -// All returns all orders for the authenticated account. -func (s *OrderService) All(market string, limit, page int, states ...OrderState) ([]Order, error) { - payload := map[string]interface{}{ - "market": market, - "limit": limit, - "page": page, - "state": states, - "order_by": "desc", - } - - req, err := s.client.newAuthenticatedRequest("GET", "v2/orders", payload) - if err != nil { - return nil, err - } - - response, err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var orders []Order - if err := response.DecodeJSON(&orders); err != nil { - return nil, err - } - - return orders, nil -} - -// CancelAll active orders for the authenticated account. -func (s *OrderService) CancelAll(side string, market string) error { - payload := map[string]interface{}{} - if side == "buy" || side == "sell" { - payload["side"] = side - } - if market != "all" { - payload["market"] = market - } - - req, err := s.client.newAuthenticatedRequest("POST", "v2/orders/clear", payload) - if err != nil { - return err - } - - _, err = s.client.sendRequest(req) - if err != nil { - return err - } - - return nil -} - -// Options carry the option fields for REST API -type Options map[string]interface{} - -// Create a new order. -func (s *OrderService) Create(market string, side string, volume float64, price float64, orderType string, options Options) (*Order, error) { - options["market"] = market - options["volume"] = strconv.FormatFloat(volume, 'f', -1, 64) - options["price"] = strconv.FormatFloat(price, 'f', -1, 64) - options["side"] = side - options["ord_type"] = orderType - response, err := s.client.sendAuthenticatedRequest("POST", "v2/orders", options) - if err != nil { - return nil, err - } - - var order = Order{} - if err := response.DecodeJSON(&order); err != nil { - return nil, err - } - - return &order, nil -} - -// Create multiple order in a single request -func (s *OrderService) CreateMulti(market string, orders []Order) (*MultiOrderResponse, error) { - req := s.NewCreateMultiOrderRequest() - req.Market(market) - req.AddOrders(orders...) - return req.Do(context.Background()) -} - -// Cancel the order with id `orderID`. 
-func (s *OrderService) Cancel(orderID uint64, clientOrderID string) error { - req := s.NewOrderCancelRequest() - - if orderID > 0 { - req.ID(orderID) - } else if len(clientOrderID) > 0 { - req.ClientOrderID(clientOrderID) - } - - return req.Do(context.Background()) -} - -type OrderCancelAllRequestParams struct { - *PrivateRequestParams - - Side string `json:"side,omitempty"` - Market string `json:"market,omitempty"` - GroupID int64 `json:"groupID,omitempty"` -} - -type OrderCancelAllRequest struct { - client *RestClient - - params OrderCancelAllRequestParams -} - -func (r *OrderCancelAllRequest) Side(side string) *OrderCancelAllRequest { - r.params.Side = side - return r -} - -func (r *OrderCancelAllRequest) Market(market string) *OrderCancelAllRequest { - r.params.Market = market - return r -} - -func (r *OrderCancelAllRequest) GroupID(groupID int64) *OrderCancelAllRequest { - r.params.GroupID = groupID - return r -} - -func (r *OrderCancelAllRequest) Do(ctx context.Context) (orders []Order, err error) { - req, err := r.client.newAuthenticatedRequest("POST", "v2/orders/clear", &r.params) - if err != nil { - return - } - - response, err := r.client.sendRequest(req) - if err != nil { - return - } - - err = response.DecodeJSON(&orders) - return -} - -func (s *OrderService) NewOrderCancelAllRequest() *OrderCancelAllRequest { - return &OrderCancelAllRequest{client: s.client} -} - -type OrderCancelRequestParams struct { - *PrivateRequestParams - - ID uint64 `json:"id,omitempty"` - ClientOrderID string `json:"client_oid,omitempty"` -} - -type OrderCancelRequest struct { - client *RestClient - - params OrderCancelRequestParams -} - -func (r *OrderCancelRequest) ID(id uint64) *OrderCancelRequest { - r.params.ID = id - return r -} - -func (r *OrderCancelRequest) ClientOrderID(id string) *OrderCancelRequest { - r.params.ClientOrderID = id - return r -} - -func (r *OrderCancelRequest) Do(ctx context.Context) error { - req, err := r.client.newAuthenticatedRequest("POST", "v2/order/delete", &r.params) - if err != nil { - return err - } - - response, err := r.client.sendRequest(req) - if err != nil { - return err - } - - var order = Order{} - if err := response.DecodeJSON(&order); err != nil { - return err - } - - return err -} - -func (s *OrderService) NewOrderCancelRequest() *OrderCancelRequest { - return &OrderCancelRequest{client: s.client} -} - -// Status retrieves the given order from the API. -func (s *OrderService) Get(orderID uint64) (*Order, error) { - payload := map[string]interface{}{ - "id": orderID, - } - - req, err := s.client.newAuthenticatedRequest("GET", "v2/order", payload) - - if err != nil { - return &Order{}, err - } - - response, err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var order = Order{} - - if err := response.DecodeJSON(&order); err != nil { - return nil, err - } - - return &order, nil -} - -type MultiOrderRequestParams struct { - *PrivateRequestParams - - Market string `json:"market"` - Orders []Order `json:"orders"` -} - -type MultiOrderResponse []struct { - Error string `json:"error,omitempty"` - Order Order `json:"order,omitempty"` -} - -type CreateMultiOrderRequest struct { - client *RestClient - - params MultiOrderRequestParams -} - -func (r *CreateMultiOrderRequest) Market(market string) *CreateMultiOrderRequest { - r.params.Market = market - return r -} - -func (r *CreateMultiOrderRequest) AddOrders(orders ...Order) *CreateMultiOrderRequest { - r.params.Orders = append(r.params.Orders, orders...) 
- return r -} - -func (r *CreateMultiOrderRequest) Do(ctx context.Context) (multiOrderResponse *MultiOrderResponse, err error) { - req, err := r.client.newAuthenticatedRequest("POST", "v2/orders/multi/onebyone", r.params) - if err != nil { - return multiOrderResponse, errors.Wrapf(err, "order create error") - } - - response, err := r.client.sendRequest(req) - if err != nil { - return multiOrderResponse, err - } - - multiOrderResponse = &MultiOrderResponse{} - if errJson := response.DecodeJSON(multiOrderResponse); errJson != nil { - return multiOrderResponse, errJson - } - - return multiOrderResponse, err -} - -func (s *OrderService) NewCreateMultiOrderRequest() *CreateMultiOrderRequest { - return &CreateMultiOrderRequest{client: s.client} -} - -type CreateOrderRequestParams struct { - *PrivateRequestParams - - Market string `json:"market"` - Volume string `json:"volume"` - Price string `json:"price,omitempty"` - StopPrice string `json:"stop_price,omitempty"` - Side string `json:"side"` - OrderType string `json:"ord_type"` - ClientOrderID string `json:"client_oid,omitempty"` - GroupID string `json:"group_id,omitempty"` -} - -type CreateOrderRequest struct { - client *RestClient - - params CreateOrderRequestParams -} - -func (r *CreateOrderRequest) Market(market string) *CreateOrderRequest { - r.params.Market = market - return r -} - -func (r *CreateOrderRequest) Volume(volume string) *CreateOrderRequest { - r.params.Volume = volume - return r +type SubmitOrder struct { + Side string `json:"side"` + Market string `json:"market"` + Price string `json:"price"` + StopPrice string `json:"stop_price,omitempty"` + OrderType OrderType `json:"ord_type"` + Volume string `json:"volume"` + GroupID uint32 `json:"group_id,omitempty"` + ClientOID string `json:"client_oid,omitempty"` } -func (r *CreateOrderRequest) Price(price string) *CreateOrderRequest { - r.params.Price = price - return r -} - -func (r *CreateOrderRequest) StopPrice(price string) *CreateOrderRequest { - r.params.StopPrice = price - return r -} - -func (r *CreateOrderRequest) Side(side string) *CreateOrderRequest { - r.params.Side = side - return r -} - -func (r *CreateOrderRequest) OrderType(orderType string) *CreateOrderRequest { - r.params.OrderType = orderType - return r -} - -func (r *CreateOrderRequest) ClientOrderID(clientOrderID string) *CreateOrderRequest { - r.params.ClientOrderID = clientOrderID - return r -} - -func (r *CreateOrderRequest) Do(ctx context.Context) (order *Order, err error) { - req, err := r.client.newAuthenticatedRequest("POST", "v2/orders", &r.params) - if err != nil { - return order, errors.Wrapf(err, "order create error") - } - - response, err := r.client.sendRequest(req) - if err != nil { - return order, err - } - - order = &Order{} - if err := response.DecodeJSON(order); err != nil { - return nil, err - } - - return order, err -} - -func (s *OrderService) NewCreateOrderRequest() *CreateOrderRequest { - return &CreateOrderRequest{client: s.client} +// Order represents one returned order (POST order/GET order/GET orders) on the max platform. 
+type Order struct { + ID uint64 `json:"id,omitempty"` + WalletType WalletType `json:"wallet_type,omitempty"` + Side string `json:"side"` + OrderType OrderType `json:"ord_type"` + Price fixedpoint.Value `json:"price,omitempty"` + StopPrice fixedpoint.Value `json:"stop_price,omitempty"` + AveragePrice fixedpoint.Value `json:"avg_price,omitempty"` + State OrderState `json:"state,omitempty"` + Market string `json:"market,omitempty"` + Volume fixedpoint.Value `json:"volume"` + RemainingVolume fixedpoint.Value `json:"remaining_volume,omitempty"` + ExecutedVolume fixedpoint.Value `json:"executed_volume,omitempty"` + TradesCount int64 `json:"trades_count,omitempty"` + GroupID uint32 `json:"group_id,omitempty"` + ClientOID string `json:"client_oid,omitempty"` + CreatedAt types.MillisecondTimestamp `json:"created_at"` } diff --git a/pkg/exchange/max/maxapi/order_test.go b/pkg/exchange/max/maxapi/order_test.go new file mode 100644 index 0000000000..0bfb887daf --- /dev/null +++ b/pkg/exchange/max/maxapi/order_test.go @@ -0,0 +1,24 @@ +package max + +import ( + "os" + "regexp" + "testing" +) + +func maskSecret(s string) string { + re := regexp.MustCompile(`\b(\w{4})\w+\b`) + s = re.ReplaceAllString(s, "$1******") + return s +} + +func integrationTestConfigured(t *testing.T, prefix string) (key, secret string, ok bool) { + var hasKey, hasSecret bool + key, hasKey = os.LookupEnv(prefix + "_API_KEY") + secret, hasSecret = os.LookupEnv(prefix + "_API_SECRET") + ok = hasKey && hasSecret && os.Getenv("TEST_"+prefix) == "1" + if ok { + t.Logf(prefix+" api integration test enabled, key = %s, secret = %s", maskSecret(key), maskSecret(secret)) + } + return key, secret, ok +} diff --git a/pkg/exchange/max/maxapi/public.go b/pkg/exchange/max/maxapi/public.go index 389bf42934..31ff4ad94f 100644 --- a/pkg/exchange/max/maxapi/public.go +++ b/pkg/exchange/max/maxapi/public.go @@ -1,8 +1,8 @@ package max import ( + "context" "fmt" - "io/ioutil" "net/url" "strconv" "strings" @@ -11,6 +11,7 @@ import ( "github.com/pkg/errors" "github.com/valyala/fastjson" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) @@ -19,14 +20,16 @@ type PublicService struct { } type Market struct { - ID string `json:"id"` - Name string `json:"name"` - BaseUnit string `json:"base_unit"` - BaseUnitPrecision int `json:"base_unit_precision"` - QuoteUnit string `json:"quote_unit"` - QuoteUnitPrecision int `json:"quote_unit_precision"` - MinBaseAmount float64 `json:"min_base_amount"` - MinQuoteAmount float64 `json:"min_quote_amount"` + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"market_status"` // active + BaseUnit string `json:"base_unit"` + BaseUnitPrecision int `json:"base_unit_precision"` + QuoteUnit string `json:"quote_unit"` + QuoteUnitPrecision int `json:"quote_unit_precision"` + MinBaseAmount fixedpoint.Value `json:"min_base_amount"` + MinQuoteAmount fixedpoint.Value `json:"min_quote_amount"` + SupportMargin bool `json:"m_wallet_supported"` } type Ticker struct { @@ -45,12 +48,12 @@ type Ticker struct { func (s *PublicService) Timestamp() (serverTimestamp int64, err error) { // sync timestamp with server - req, err := s.client.newRequest("GET", "v2/timestamp", nil, nil) + req, err := s.client.NewRequest(context.Background(), "GET", "v2/timestamp", nil, nil) if err != nil { return 0, err } - response, err := s.client.sendRequest(req) + response, err := s.client.SendRequest(req) if err != nil { return 0, err } @@ -64,12 +67,12 @@ func (s *PublicService) Timestamp() (serverTimestamp int64, err 
error) { } func (s *PublicService) Markets() ([]Market, error) { - req, err := s.client.newRequest("GET", "v2/markets", url.Values{}, nil) + req, err := s.client.NewRequest(context.Background(), "GET", "v2/markets", url.Values{}, nil) if err != nil { return nil, err } - response, err := s.client.sendRequest(req) + response, err := s.client.SendRequest(req) if err != nil { return nil, err } @@ -84,12 +87,12 @@ func (s *PublicService) Markets() ([]Market, error) { func (s *PublicService) Tickers() (map[string]Ticker, error) { var endPoint = "v2/tickers" - req, err := s.client.newRequest("GET", endPoint, url.Values{}, nil) + req, err := s.client.NewRequest(context.Background(), "GET", endPoint, url.Values{}, nil) if err != nil { return nil, err } - response, err := s.client.sendRequest(req) + response, err := s.client.SendRequest(req) if err != nil { return nil, err } @@ -115,12 +118,12 @@ func (s *PublicService) Tickers() (map[string]Ticker, error) { func (s *PublicService) Ticker(market string) (*Ticker, error) { var endPoint = "v2/tickers/" + market - req, err := s.client.newRequest("GET", endPoint, url.Values{}, nil) + req, err := s.client.NewRequest(context.Background(), "GET", endPoint, url.Values{}, nil) if err != nil { return nil, err } - response, err := s.client.sendRequest(req) + response, err := s.client.SendRequest(req) if err != nil { return nil, err } @@ -206,18 +209,18 @@ type KLine struct { Symbol string Interval string StartTime, EndTime time.Time - Open, High, Low, Close float64 - Volume float64 + Open, High, Low, Close fixedpoint.Value + Volume fixedpoint.Value Closed bool } func (k KLine) KLine() types.KLine { return types.KLine{ - Exchange: "max", + Exchange: types.ExchangeMax, Symbol: strings.ToUpper(k.Symbol), // global symbol Interval: types.Interval(k.Interval), - StartTime: k.StartTime, - EndTime: k.EndTime, + StartTime: types.Time(k.StartTime), + EndTime: types.Time(k.EndTime), Open: k.Open, Close: k.Close, High: k.High, @@ -249,28 +252,17 @@ func (s *PublicService) KLines(symbol string, resolution string, start time.Time queries.Set("limit", strconv.Itoa(limit)) // default to 30, max limit = 10,000 } - req, err := s.client.newRequest("GET", fmt.Sprintf("%s/k", s.client.BaseURL), queries, nil) + req, err := s.client.NewRequest(context.Background(), "GET", fmt.Sprintf("%s/k", s.client.BaseURL), queries, nil) if err != nil { return nil, fmt.Errorf("request build error: %s", err.Error()) } - resp, err := s.client.Do(req) + resp, err := s.client.SendRequest(req) if err != nil { return nil, fmt.Errorf("request failed: %s", err.Error()) } - defer func() { - if err := resp.Body.Close(); err != nil { - logger.WithError(err).Error("failed to close resp body") - } - }() - - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - return parseKLines(body, symbol, resolution, interval) + return parseKLines(resp.Body, symbol, resolution, interval) } func parseKLines(payload []byte, symbol, resolution string, interval Interval) (klines []KLine, err error) { @@ -309,11 +301,11 @@ func parseKLines(payload []byte, symbol, resolution string, interval Interval) ( Interval: resolution, StartTime: startTime, EndTime: endTime, - Open: slice[1].GetFloat64(), - High: slice[2].GetFloat64(), - Low: slice[3].GetFloat64(), - Close: slice[4].GetFloat64(), - Volume: slice[5].GetFloat64(), + Open: fixedpoint.NewFromFloat(slice[1].GetFloat64()), + High: fixedpoint.NewFromFloat(slice[2].GetFloat64()), + Low: fixedpoint.NewFromFloat(slice[3].GetFloat64()), + Close: 
fixedpoint.NewFromFloat(slice[4].GetFloat64()), + Volume: fixedpoint.NewFromFloat(slice[5].GetFloat64()), Closed: isClosed, }) } diff --git a/pkg/exchange/max/maxapi/public_parser.go b/pkg/exchange/max/maxapi/public_parser.go index e1a0d0ae8f..66f0cf88a5 100644 --- a/pkg/exchange/max/maxapi/public_parser.go +++ b/pkg/exchange/max/maxapi/public_parser.go @@ -9,7 +9,6 @@ import ( "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" - "github.com/c9s/bbgo/pkg/util" ) var ErrIncorrectBookEntryElementLength = errors.New("incorrect book entry element length") @@ -42,7 +41,7 @@ func ParseMessage(payload []byte) (interface{}, error) { eventType := string(val.GetStringBytes("e")) switch eventType { case "authenticated": - return nil, nil + return parseAuthEvent(val) case "error": return parseErrorEvent(val) case "subscribed", "unsubscribed": @@ -118,16 +117,16 @@ type KLinePayload struct { func (k KLinePayload) KLine() types.KLine { return types.KLine{ - StartTime: time.Unix(0, k.StartTime*int64(time.Millisecond)), - EndTime: time.Unix(0, k.EndTime*int64(time.Millisecond)), + StartTime: types.Time(time.Unix(0, k.StartTime*int64(time.Millisecond))), + EndTime: types.Time(time.Unix(0, k.EndTime*int64(time.Millisecond))), Symbol: k.Market, Interval: types.Interval(k.Resolution), - Open: util.MustParseFloat(k.Open), - Close: util.MustParseFloat(k.Close), - High: util.MustParseFloat(k.High), - Low: util.MustParseFloat(k.Low), - Volume: util.MustParseFloat(k.Volume), - QuoteVolume: 0, // TODO: add this from kingfisher + Open: fixedpoint.MustNewFromString(k.Open), + Close: fixedpoint.MustNewFromString(k.Close), + High: fixedpoint.MustNewFromString(k.High), + Low: fixedpoint.MustNewFromString(k.Low), + Volume: fixedpoint.MustNewFromString(k.Volume), + QuoteVolume: fixedpoint.Zero, // TODO: add this from kingfisher LastTradeID: uint64(k.LastTradeID), NumberOfTrades: 0, // TODO: add this from kingfisher Closed: k.Closed, @@ -174,7 +173,7 @@ func (e *BookEvent) Time() time.Time { return time.Unix(0, e.Timestamp*int64(time.Millisecond)) } -func (e *BookEvent) OrderBook() (snapshot types.OrderBook, err error) { +func (e *BookEvent) OrderBook() (snapshot types.SliceOrderBook, err error) { snapshot.Symbol = strings.ToUpper(e.Market) for _, bid := range e.Bids { @@ -211,11 +210,11 @@ func parseKLineEvent(val *fastjson.Value) (*KLineEvent, error) { Interval: string(val.GetStringBytes("k", "R")), StartTime: time.Unix(0, val.GetInt64("k", "ST")*int64(time.Millisecond)), EndTime: time.Unix(0, val.GetInt64("k", "ET")*int64(time.Millisecond)), - Open: util.MustParseFloat(string(val.GetStringBytes("k", "O"))), - High: util.MustParseFloat(string(val.GetStringBytes("k", "H"))), - Low: util.MustParseFloat(string(val.GetStringBytes("k", "L"))), - Close: util.MustParseFloat(string(val.GetStringBytes("k", "C"))), - Volume: util.MustParseFloat(string(val.GetStringBytes("k", "v"))), + Open: fixedpoint.MustNewFromBytes(val.GetStringBytes("k", "O")), + High: fixedpoint.MustNewFromBytes(val.GetStringBytes("k", "H")), + Low: fixedpoint.MustNewFromBytes(val.GetStringBytes("k", "L")), + Close: fixedpoint.MustNewFromBytes(val.GetStringBytes("k", "C")), + Volume: fixedpoint.MustNewFromBytes(val.GetStringBytes("k", "v")), Closed: val.GetBool("k", "x"), } diff --git a/pkg/exchange/max/maxapi/restapi.go b/pkg/exchange/max/maxapi/restapi.go index 27d14e3f8f..c9f1a0a3eb 100644 --- a/pkg/exchange/max/maxapi/restapi.go +++ b/pkg/exchange/max/maxapi/restapi.go @@ -1,36 +1,60 @@ package max import ( - "bytes" + "context" 
"crypto/hmac" "crypto/sha256" "encoding/base64" "encoding/hex" "encoding/json" "fmt" - "io/ioutil" "math" + "net" "net/http" "net/url" - "reflect" "regexp" - "strconv" + "strings" "sync/atomic" "time" + "github.com/c9s/requestgen" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/util" + "github.com/c9s/bbgo/pkg/version" ) const ( // ProductionAPIURL is the official MAX API v2 Endpoint ProductionAPIURL = "https://max-api.maicoin.com/api/v2" - UserAgent = "bbgo/1.0" + UserAgent = "bbgo/" + version.Version + + defaultHTTPTimeout = time.Second * 60 - defaultHTTPTimeout = time.Second * 15 + // 2018-09-01 08:00:00 +0800 CST + TimestampSince = 1535760000 ) +var httpTransportMaxIdleConnsPerHost = http.DefaultMaxIdleConnsPerHost +var httpTransportMaxIdleConns = 100 +var httpTransportIdleConnTimeout = 90 * time.Second + +func init() { + + if val, ok := util.GetEnvVarInt("HTTP_TRANSPORT_MAX_IDLE_CONNS_PER_HOST"); ok { + httpTransportMaxIdleConnsPerHost = val + } + + if val, ok := util.GetEnvVarInt("HTTP_TRANSPORT_MAX_IDLE_CONNS"); ok { + httpTransportMaxIdleConns = val + } + if val, ok := util.GetEnvVarDuration("HTTP_TRANSPORT_IDLE_CONN_TIMEOUT"); ok { + httpTransportIdleConnTimeout = val + } +} + var logger = log.WithField("exchange", "max") var htmlTagPattern = regexp.MustCompile("<[/]?[a-zA-Z-]+.*?>") @@ -44,90 +68,71 @@ var timeOffset int64 = 0 var serverTimestamp = time.Now().Unix() // reqCount is used for nonce, this variable counts the API request count. -var reqCount int64 = 0 - -// Response is wrapper for standard http.Response and provides -// more methods. -type Response struct { - *http.Response - - // Body overrides the composited Body field. - Body []byte +var reqCount int64 = 1 + +// create an isolated http httpTransport rather than the default one +var httpTransport = &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: (&net.Dialer{ + Timeout: 10 * time.Second, + KeepAlive: 30 * time.Second, + }).DialContext, + ForceAttemptHTTP2: true, + MaxIdleConns: httpTransportMaxIdleConns, + MaxIdleConnsPerHost: httpTransportMaxIdleConnsPerHost, + IdleConnTimeout: httpTransportIdleConnTimeout, + TLSHandshakeTimeout: 10 * time.Second, + ExpectContinueTimeout: 1 * time.Second, } -// newResponse is a wrapper of the http.Response instance, it reads the response body and close the file. -func newResponse(r *http.Response) (response *Response, err error) { - body, err := ioutil.ReadAll(r.Body) - if err != nil { - return nil, err - } - - err = r.Body.Close() - response = &Response{Response: r, Body: body} - return response, err +var defaultHttpClient = &http.Client{ + Timeout: defaultHTTPTimeout, + Transport: httpTransport, } -// String converts response body to string. -// An empty string will be returned if error. 
-func (r *Response) String() string { - return string(r.Body) -} +type RestClient struct { + requestgen.BaseAPIClient -func (r *Response) DecodeJSON(o interface{}) error { - return json.Unmarshal(r.Body, o) -} + APIKey, APISecret string -type RestClient struct { - client *http.Client - - BaseURL *url.URL - - // Authentication - APIKey string - APISecret string - - AccountService *AccountService - PublicService *PublicService - TradeService *TradeService - OrderService *OrderService - // OrderBookService *OrderBookService - // MaxTokenService *MaxTokenService - // MaxKLineService *KLineService - // CreditService *CreditService + AccountService *AccountService + PublicService *PublicService + TradeService *TradeService + OrderService *OrderService + RewardService *RewardService + WithdrawalService *WithdrawalService } -func NewRestClientWithHttpClient(baseURL string, httpClient *http.Client) *RestClient { +func NewRestClient(baseURL string) *RestClient { u, err := url.Parse(baseURL) if err != nil { panic(err) } var client = &RestClient{ - client: httpClient, - BaseURL: u, + BaseAPIClient: requestgen.BaseAPIClient{ + HttpClient: defaultHttpClient, + BaseURL: u, + }, } client.AccountService = &AccountService{client} client.TradeService = &TradeService{client} client.PublicService = &PublicService{client} client.OrderService = &OrderService{client} - // client.OrderBookService = &OrderBookService{client} - // client.MaxTokenService = &MaxTokenService{client} - // client.MaxKLineService = &KLineService{client} - // client.CreditService = &CreditService{client} + client.RewardService = &RewardService{client} + client.WithdrawalService = &WithdrawalService{client} + + // defaultHttpClient.MaxTokenService = &MaxTokenService{defaultHttpClient} client.initNonce() return client } -func NewRestClient(baseURL string) *RestClient { - return NewRestClientWithHttpClient(baseURL, &http.Client{ - Timeout: defaultHTTPTimeout, - }) -} - // Auth sets the api key and secret for use in requests that require authentication. func (c *RestClient) Auth(key string, secret string) *RestClient { + // pragma: allowlist nextline secret c.APIKey = key + // pragma: allowlist nextline secret c.APISecret = secret return c } @@ -137,97 +142,71 @@ func (c *RestClient) initNonce() { var err error serverTimestamp, err = c.PublicService.Timestamp() if err != nil { - logger.WithError(err).Panic("failed to sync timestamp with Max") + logger.WithError(err).Panic("failed to sync timestamp with max") } - // 1 is for the request count mod 0.000 to 0.999 - timeOffset = serverTimestamp - clientTime.Unix() - 1 - + timeOffset = serverTimestamp - clientTime.Unix() logger.Infof("loaded max server timestamp: %d offset=%d", serverTimestamp, timeOffset) } func (c *RestClient) getNonce() int64 { + // The nonce is a positive integer timestamp: the number of milliseconds (ms) elapsed since the Unix epoch. + // It must be within +/- 30 seconds of the server time, and each nonce can only be used once. var seconds = time.Now().Unix() var rc = atomic.AddInt64(&reqCount, 1) - return (seconds+timeOffset)*1000 + int64(math.Mod(float64(rc), 1000.0)) + return (seconds+timeOffset)*1000 - 1 + int64(math.Mod(float64(rc), 1000.0)) } -// NewRequest create new API request. Relative url can be provided in refURL.
-func (c *RestClient) newRequest(method string, refURL string, params url.Values, body []byte) (*http.Request, error) { - rel, err := url.Parse(refURL) - if err != nil { - return nil, err - } - if params != nil { - rel.RawQuery = params.Encode() - } - var req *http.Request - u := c.BaseURL.ResolveReference(rel) +func (c *RestClient) NewAuthenticatedRequest(ctx context.Context, m string, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + return c.newAuthenticatedRequest(ctx, m, refURL, params, payload, nil) +} - req, err = http.NewRequest(method, u.String(), bytes.NewReader(body)) - if err != nil { - return nil, err +// newAuthenticatedRequest creates new http request for authenticated routes. +func (c *RestClient) newAuthenticatedRequest(ctx context.Context, m string, refURL string, params url.Values, data interface{}, rel *url.URL) (*http.Request, error) { + if len(c.APIKey) == 0 { + return nil, errors.New("empty api key") } - req.Header.Add("User-Agent", UserAgent) - return req, nil -} + if len(c.APISecret) == 0 { + return nil, errors.New("empty api secret") + } -// newAuthenticatedRequest creates new http request for authenticated routes. -func (c *RestClient) newAuthenticatedRequest(m string, refURL string, data interface{}) (*http.Request, error) { - rel, err := url.Parse(refURL) - if err != nil { - return nil, err + var err error + if rel == nil { + rel, err = url.Parse(refURL) + if err != nil { + return nil, err + } } var p []byte + var payload = map[string]interface{}{ + "nonce": c.getNonce(), + "path": c.BaseURL.ResolveReference(rel).Path, + } switch d := data.(type) { - - case nil: - payload := map[string]interface{}{ - "nonce": c.getNonce(), - "path": c.BaseURL.ResolveReference(rel).Path, - } - p, err = json.Marshal(payload) - case map[string]interface{}: - payload := map[string]interface{}{ - "nonce": c.getNonce(), - "path": c.BaseURL.ResolveReference(rel).Path, - } - for k, v := range d { payload[k] = v } + } - p, err = json.Marshal(payload) - - default: - params, err := getPrivateRequestParamsObject(data) - if err != nil { - return nil, errors.Wrapf(err, "unsupported payload type: %T", d) + for k, vs := range params { + k = strings.TrimSuffix(k, "[]") + if len(vs) == 1 { + payload[k] = vs[0] + } else { + payload[k] = vs } - - params.Nonce = c.getNonce() - params.Path = c.BaseURL.ResolveReference(rel).Path - - p, err = json.Marshal(d) } + p, err = castPayload(payload) if err != nil { return nil, err } - if len(c.APIKey) == 0 { - return nil, errors.New("empty api key") - } - - if len(c.APISecret) == 0 { - return nil, errors.New("empty api secret") - } - - req, err := c.newRequest(m, refURL, nil, p) + req, err := c.NewRequest(ctx, m, refURL, params, p) if err != nil { return nil, err } @@ -235,7 +214,6 @@ func (c *RestClient) newAuthenticatedRequest(m string, refURL string, data inter encoded := base64.StdEncoding.EncodeToString(p) req.Header.Add("Content-Type", "application/json") - req.Header.Add("Accept", "application/json") req.Header.Add("X-MAX-ACCESSKEY", c.APIKey) req.Header.Add("X-MAX-PAYLOAD", encoded) req.Header.Add("X-MAX-SIGNATURE", signPayload(encoded, c.APISecret)) @@ -243,127 +221,13 @@ func (c *RestClient) newAuthenticatedRequest(m string, refURL string, data inter return req, nil } -func getPrivateRequestParamsObject(v interface{}) (*PrivateRequestParams, error) { - vt := reflect.ValueOf(v) - - if vt.Kind() == reflect.Ptr { - vt = vt.Elem() - } - - if vt.Kind() != reflect.Struct { - return nil, errors.New("reflect error: given 
object is not a struct" + vt.Kind().String()) - } - - if !vt.CanSet() { - return nil, errors.New("reflect error: can not set object") - } - - field := vt.FieldByName("PrivateRequestParams") - if !field.IsValid() { - return nil, errors.New("reflect error: field PrivateRequestParams not found") - } - - if field.IsNil() { - field.Set(reflect.ValueOf(&PrivateRequestParams{})) - } - - params, ok := field.Interface().(*PrivateRequestParams) - if !ok { - return nil, errors.New("reflect error: failed to cast value to *PrivateRequestParams") - } - - return params, nil -} - -func signPayload(payload string, secret string) string { - var sig = hmac.New(sha256.New, []byte(secret)) - _, err := sig.Write([]byte(payload)) - if err != nil { - return "" - } - return hex.EncodeToString(sig.Sum(nil)) -} - -func (c *RestClient) Do(req *http.Request) (resp *http.Response, err error) { - req.Header.Set("User-Agent", UserAgent) - return c.client.Do(req) -} - -// sendRequest sends the request to the API server and handle the response -func (c *RestClient) sendRequest(req *http.Request) (*Response, error) { - resp, err := c.Do(req) +func (c *RestClient) sendAuthenticatedRequest(m string, refURL string, data map[string]interface{}) (*requestgen.Response, error) { + req, err := c.newAuthenticatedRequest(nil, m, refURL, nil, data, nil) if err != nil { return nil, err } - // newResponse reads the response body and return a new Response object - response, err := newResponse(resp) - if err != nil { - return response, err - } - - // Check error, if there is an error, return the ErrorResponse struct type - if isError(response) { - errorResponse, err := toErrorResponse(response) - if err != nil { - return response, err - } - return response, errorResponse - } - - return response, nil -} - -func (c *RestClient) sendAuthenticatedRequest(m string, refURL string, data map[string]interface{}) (*Response, error) { - req, err := c.newAuthenticatedRequest(m, refURL, data) - if err != nil { - return nil, err - } - response, err := c.sendRequest(req) - if err != nil { - return nil, err - } - return response, err -} - -// FIXME: should deprecate the polling usage from the websocket struct -func (c *RestClient) GetTrades(market string, lastTradeID int64) ([]byte, error) { - params := url.Values{} - params.Add("market", market) - if lastTradeID > 0 { - params.Add("from", strconv.Itoa(int(lastTradeID))) - } - - return c.get("/trades", params) -} - -// get sends GET http request to the api endpoint, the urlPath must start with a slash '/' -func (c *RestClient) get(urlPath string, values url.Values) ([]byte, error) { - var reqURL = c.BaseURL.String() + urlPath - - // Create request - req, err := http.NewRequest("GET", reqURL, nil) - if err != nil { - return nil, fmt.Errorf("could not init request: %s", err.Error()) - } - - req.URL.RawQuery = values.Encode() - req.Header.Add("User-Agent", UserAgent) - - // Execute request - resp, err := c.client.Do(req) - if err != nil { - return nil, fmt.Errorf("could not execute request: %s", err.Error()) - } - defer resp.Body.Close() - - // Load request - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("could not read response: %s", err.Error()) - } - - return body, nil + return c.SendRequest(req) } // ErrorResponse is the custom error type that is returned if the API returns an @@ -374,7 +238,7 @@ type ErrorField struct { } type ErrorResponse struct { - *Response + *requestgen.Response Err ErrorField `json:"error"` } @@ -388,14 +252,8 @@ func (r *ErrorResponse) Error() 
string { ) } -// isError check the response status code so see if a response is an error. -func isError(response *Response) bool { - var c = response.StatusCode - return c < 200 || c > 299 -} - -// toErrorResponse tries to convert/parse the server response to the standard Error interface object -func toErrorResponse(response *Response) (errorResponse *ErrorResponse, err error) { +// ToErrorResponse tries to convert/parse the server response to the standard Error interface object +func ToErrorResponse(response *requestgen.Response) (errorResponse *ErrorResponse, err error) { errorResponse = &ErrorResponse{Response: response} contentType := response.Header.Get("content-type") @@ -410,7 +268,36 @@ func toErrorResponse(response *Response) (errorResponse *ErrorResponse, err erro // convert 5xx error from the HTML page to the ErrorResponse errorResponse.Err.Message = htmlTagPattern.ReplaceAllLiteralString(string(response.Body), "") return errorResponse, nil + case "text/plain": + errorResponse.Err.Message = string(response.Body) + return errorResponse, nil } return errorResponse, fmt.Errorf("unexpected response content type %s", contentType) } + +func castPayload(payload interface{}) ([]byte, error) { + if payload == nil { + return nil, nil + } + + switch v := payload.(type) { + case string: + return []byte(v), nil + + case []byte: + return v, nil + } + + body, err := json.Marshal(payload) + return body, err +} + +func signPayload(payload string, secret string) string { + var sig = hmac.New(sha256.New, []byte(secret)) + _, err := sig.Write([]byte(payload)) + if err != nil { + return "" + } + return hex.EncodeToString(sig.Sum(nil)) +} diff --git a/pkg/exchange/max/maxapi/reward.go b/pkg/exchange/max/maxapi/reward.go new file mode 100644 index 0000000000..685580e055 --- /dev/null +++ b/pkg/exchange/max/maxapi/reward.go @@ -0,0 +1,169 @@ +package max + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type RewardType string + +const ( + RewardAirdrop = RewardType("airdrop_reward") + RewardCommission = RewardType("commission") + RewardHolding = RewardType("holding_reward") + RewardMining = RewardType("mining_reward") + RewardTrading = RewardType("trading_reward") + RewardRedemption = RewardType("redemption_reward") + RewardVipRebate = RewardType("vip_rebate") +) + +func ParseRewardType(s string) (RewardType, error) { + switch s { + case "airdrop_reward": + return RewardAirdrop, nil + case "commission": + return RewardCommission, nil + case "holding_reward": + return RewardHolding, nil + case "mining_reward": + return RewardMining, nil + case "trading_reward": + return RewardTrading, nil + case "vip_rebate": + return RewardVipRebate, nil + case "redemption_reward": + return RewardRedemption, nil + + } + + return RewardType(""), fmt.Errorf("unknown reward type: %s", s) +} + +func (t *RewardType) UnmarshalJSON(o []byte) error { + var s string + var err = json.Unmarshal(o, &s) + if err != nil { + return err + } + + rt, err := ParseRewardType(s) + if err != nil { + return err + } + + *t = rt + return nil +} + +func (t RewardType) RewardType() (types.RewardType, error) { + switch t { + + case RewardAirdrop: + return types.RewardAirdrop, nil + + case RewardCommission: + return types.RewardCommission, nil + + case RewardHolding: + return types.RewardHolding, nil + + case 
RewardMining: + return types.RewardMining, nil + + case RewardTrading: + return types.RewardTrading, nil + + case RewardVipRebate: + return types.RewardVipRebate, nil + + } + + return types.RewardType(""), fmt.Errorf("unknown reward type: %s", t) +} + +type Reward struct { + // UUID here is more like SN, not the real UUID + UUID string `json:"uuid"` + Type RewardType `json:"type"` + Currency string `json:"currency"` + Amount fixedpoint.Value `json:"amount"` + State string `json:"state"` + Note string `json:"note"` + + // Unix timestamp in seconds + CreatedAt types.Timestamp `json:"created_at"` +} + +func (reward Reward) Reward() (*types.Reward, error) { + rt, err := reward.Type.RewardType() + if err != nil { + return nil, err + } + + return &types.Reward{ + UUID: reward.UUID, + Exchange: types.ExchangeMax, + Type: rt, + Currency: strings.ToUpper(reward.Currency), + Quantity: reward.Amount, + State: reward.State, + Note: reward.Note, + Spent: false, + CreatedAt: types.Time(reward.CreatedAt), + }, nil +} + +type RewardService struct { + client *RestClient +} + +func (s *RewardService) NewGetRewardsRequest() *GetRewardsRequest { + return &GetRewardsRequest{client: s.client} +} + +func (s *RewardService) NewGetRewardsOfTypeRequest(pathType RewardType) *GetRewardsOfTypeRequest { + return &GetRewardsOfTypeRequest{client: s.client, pathType: &pathType} +} + +//go:generate GetRequest -url "v2/rewards/:path_type" -type GetRewardsOfTypeRequest -responseType []Reward +type GetRewardsOfTypeRequest struct { + client requestgen.AuthenticatedAPIClient + + pathType *RewardType `param:"path_type,slug"` + + // From Unix-timestamp + from *int64 `param:"from"` + + // To Unix-timestamp + to *int64 `param:"to"` + + page *int64 `param:"page"` + limit *int64 `param:"limit"` + offset *int64 `param:"offset"` +} + +//go:generate GetRequest -url "v2/rewards" -type GetRewardsRequest -responseType []Reward +type GetRewardsRequest struct { + client requestgen.AuthenticatedAPIClient + + currency *string `param:"currency"` + + // From Unix-timestamp + from *int64 `param:"from"` + + // To Unix-timestamp + to *int64 `param:"to"` + + page *int64 `param:"page"` + limit *int64 `param:"limit"` + offset *int64 `param:"offset"` +} diff --git a/pkg/exchange/max/maxapi/reward_test.go b/pkg/exchange/max/maxapi/reward_test.go new file mode 100644 index 0000000000..cb89066a31 --- /dev/null +++ b/pkg/exchange/max/maxapi/reward_test.go @@ -0,0 +1,52 @@ +package max + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRewardService_GetRewardsRequest(t *testing.T) { + key, secret, ok := integrationTestConfigured(t, "MAX") + if !ok { + t.SkipNow() + } + + ctx := context.Background() + + client := NewRestClient(ProductionAPIURL) + client.Auth(key, secret) + + req := client.RewardService.NewGetRewardsRequest() + rewards, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, rewards) + assert.NotEmpty(t, rewards) + + t.Logf("rewards: %+v", rewards) +} + +func TestRewardService_GetRewardsOfTypeRequest(t *testing.T) { + key, secret, ok := integrationTestConfigured(t, "MAX") + if !ok { + t.SkipNow() + } + + ctx := context.Background() + + client := NewRestClient(ProductionAPIURL) + client.Auth(key, secret) + + req := client.RewardService.NewGetRewardsOfTypeRequest(RewardCommission) + rewards, err := req.Do(ctx) + assert.NoError(t, err) + assert.NotNil(t, rewards) + assert.NotEmpty(t, rewards) + + t.Logf("rewards: %+v", rewards) + + for _, reward := range rewards { + assert.Equal(t, 
RewardCommission, reward.Type) + } +} diff --git a/pkg/exchange/max/maxapi/trade.go b/pkg/exchange/max/maxapi/trade.go index 569f76bdf4..bbc61f456d 100644 --- a/pkg/exchange/max/maxapi/trade.go +++ b/pkg/exchange/max/maxapi/trade.go @@ -1,9 +1,17 @@ package max +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST + import ( - "context" "net/url" "strconv" + "time" + + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" ) type MarkerInfo struct { @@ -19,25 +27,28 @@ type TradeInfo struct { Ask *MarkerInfo `json:"ask,omitempty"` } +type Liquidity string + // Trade represents one returned trade on the max platform. type Trade struct { - ID uint64 `json:"id" db:"exchange_id"` - Price string `json:"price" db:"price"` - Volume string `json:"volume" db:"volume"` - Funds string `json:"funds"` - Market string `json:"market" db:"market"` - MarketName string `json:"market_name"` - CreatedAt int64 `json:"created_at"` - CreatedAtMilliSeconds int64 `json:"created_at_in_ms"` - Side string `json:"side" db:"side"` - OrderID uint64 `json:"order_id"` - Fee string `json:"fee" db:"fee"` // float number as string - FeeCurrency string `json:"fee_currency" db:"fee_currency"` - Info TradeInfo `json:"info,omitempty"` + ID uint64 `json:"id" db:"exchange_id"` + WalletType WalletType `json:"wallet_type,omitempty"` + Price fixedpoint.Value `json:"price"` + Volume fixedpoint.Value `json:"volume"` + Funds fixedpoint.Value `json:"funds"` + Market string `json:"market"` + MarketName string `json:"market_name"` + CreatedAt types.MillisecondTimestamp `json:"created_at"` + Side string `json:"side"` + OrderID uint64 `json:"order_id"` + Fee fixedpoint.Value `json:"fee"` // float number as string + FeeCurrency string `json:"fee_currency"` + Liquidity Liquidity `json:"liquidity"` + Info TradeInfo `json:"info,omitempty"` } func (t Trade) IsBuyer() bool { - return t.Side == "bid" + return t.Side == "bid" || t.Side == "buy" } func (t Trade) IsMaker() bool { @@ -109,27 +120,8 @@ func (options *QueryTradeOptions) Params() url.Values { return params } -func (s *TradeService) MyTrades(options QueryTradeOptions) ([]Trade, error) { - req, err := s.client.newAuthenticatedRequest("GET", "v2/trades/my", options.Map()) - if err != nil { - return nil, err - } - - response, err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var v []Trade - if err := response.DecodeJSON(&v); err != nil { - return nil, err - } - - return v, nil -} - -func (s *TradeService) NewPrivateTradeRequest() *PrivateTradeRequest { - return &PrivateTradeRequest{client: s.client} +func (s *TradeService) NewGetPrivateTradeRequest() *GetPrivateTradesRequest { + return &GetPrivateTradesRequest{client: s.client} } type PrivateRequestParams struct { @@ -137,105 +129,26 @@ type PrivateRequestParams struct { Path string `json:"path"` } -type PrivateTradeRequestParams struct { - *PrivateRequestParams +//go:generate GetRequest -url "v2/trades/my" -type GetPrivateTradesRequest -responseType []Trade +type GetPrivateTradesRequest struct { + client requestgen.AuthenticatedAPIClient - Market string `json:"market"` + market string `param:"market"` // nolint:golint,structcheck - // Timestamp is the seconds elapsed since Unix epoch, set to return trades executed before the time only - Timestamp int `json:"timestamp,omitempty"` + // timestamp is the seconds elapsed since Unix epoch, set to return trades executed before the time only + timestamp 
*time.Time `param:"timestamp,seconds"` // nolint:golint,structcheck // From field is a trade id, set ot return trades created after the trade - From int64 `json:"from,omitempty"` + from *int64 `param:"from"` // nolint:golint,structcheck // To field trade id, set to return trades created before the trade - To int64 `json:"to,omitempty"` - - OrderBy string `json:"order_by,omitempty"` - - // default to false - Pagination bool `json:"pagination"` - - Limit int64 `json:"limit,omitempty"` - - Offset int64 `json:"offset,omitempty"` -} - -type PrivateTradeRequest struct { - client *RestClient - params PrivateTradeRequestParams -} - -func (r *PrivateTradeRequest) Market(market string) *PrivateTradeRequest { - r.params.Market = market - return r -} - -func (r *PrivateTradeRequest) From(from int64) *PrivateTradeRequest { - r.params.From = from - return r -} - -func (r *PrivateTradeRequest) To(to int64) *PrivateTradeRequest { - r.params.To = to - return r -} - -func (r *PrivateTradeRequest) Limit(limit int64) *PrivateTradeRequest { - r.params.Limit = limit - return r -} - -func (r *PrivateTradeRequest) Offset(offset int64) *PrivateTradeRequest { - r.params.Offset = offset - return r -} - -func (r *PrivateTradeRequest) Pagination(p bool) *PrivateTradeRequest { - r.params.Pagination = p - return r -} + to *int64 `param:"to"` // nolint:golint,structcheck -func (r *PrivateTradeRequest) OrderBy(orderBy string) *PrivateTradeRequest { - r.params.OrderBy = orderBy - return r -} + orderBy *string `param:"order_by"` -func (r *PrivateTradeRequest) Do(ctx context.Context) (trades []Trade, err error) { - req, err := r.client.newAuthenticatedRequest("GET", "v2/trades/my", &r.params) - if err != nil { - return trades, err - } + pagination *bool `param:"pagination"` - response, err := r.client.sendRequest(req) - if err != nil { - return trades, err - } - - if err := response.DecodeJSON(&trades); err != nil { - return trades, err - } - - return trades, err -} - -func (s *TradeService) Trades(options QueryTradeOptions) ([]Trade, error) { - var params = options.Params() - - req, err := s.client.newRequest("GET", "v2/trades", params, nil) - if err != nil { - return nil, err - } - - response, err := s.client.sendRequest(req) - if err != nil { - return nil, err - } - - var v []Trade - if err := response.DecodeJSON(&v); err != nil { - return nil, err - } + limit *int64 `param:"limit"` - return v, nil + offset *int64 `param:"offset"` } diff --git a/pkg/exchange/max/maxapi/userdata.go b/pkg/exchange/max/maxapi/userdata.go index 95bcc916ee..8fe3e3c5d0 100644 --- a/pkg/exchange/max/maxapi/userdata.go +++ b/pkg/exchange/max/maxapi/userdata.go @@ -1,9 +1,12 @@ package max import ( + "encoding/json" + "fmt" "strings" "github.com/pkg/errors" + log "github.com/sirupsen/logrus" "github.com/valyala/fastjson" "github.com/c9s/bbgo/pkg/fixedpoint" @@ -21,22 +24,23 @@ type OrderUpdate struct { Side string `json:"sd"` OrderType OrderType `json:"ot"` - Price string `json:"p"` - StopPrice string `json:"sp"` + Price fixedpoint.Value `json:"p"` + StopPrice fixedpoint.Value `json:"sp"` - Volume string `json:"v"` - AveragePrice string `json:"ap"` - State OrderState `json:"S"` - Market string `json:"M"` + Volume fixedpoint.Value `json:"v"` + AveragePrice fixedpoint.Value `json:"ap"` + State OrderState `json:"S"` + Market string `json:"M"` - RemainingVolume string `json:"rv"` - ExecutedVolume string `json:"ev"` + RemainingVolume fixedpoint.Value `json:"rv"` + ExecutedVolume fixedpoint.Value `json:"ev"` TradesCount int64 `json:"tc"` - GroupID int64 
`json:"gi"` + GroupID uint32 `json:"gi"` ClientOID string `json:"ci"` CreatedAtMs int64 `json:"T"` + UpdateTime int64 `json:"TU"` } type OrderUpdateEvent struct { @@ -45,35 +49,20 @@ type OrderUpdateEvent struct { Orders []OrderUpdate `json:"o"` } -func parserOrderUpdate(v *fastjson.Value) OrderUpdate { - return OrderUpdate{ - Event: string(v.GetStringBytes("e")), - ID: v.GetUint64("i"), - Side: string(v.GetStringBytes("sd")), - Market: string(v.GetStringBytes("M")), - OrderType: OrderType(v.GetStringBytes("ot")), - State: OrderState(v.GetStringBytes("S")), - Price: string(v.GetStringBytes("p")), - StopPrice: string(v.GetStringBytes("sp")), - AveragePrice: string(v.GetStringBytes("ap")), - Volume: string(v.GetStringBytes("v")), - RemainingVolume: string(v.GetStringBytes("rv")), - ExecutedVolume: string(v.GetStringBytes("ev")), - TradesCount: v.GetInt64("tc"), - GroupID: v.GetInt64("gi"), - ClientOID: string(v.GetStringBytes("ci")), - CreatedAtMs: v.GetInt64("T"), - } -} - func parseOrderUpdateEvent(v *fastjson.Value) *OrderUpdateEvent { var e OrderUpdateEvent e.Event = string(v.GetStringBytes("e")) e.Timestamp = v.GetInt64("T") for _, ov := range v.GetArray("o") { - o := parserOrderUpdate(ov) - e.Orders = append(e.Orders, o) + var o = ov.String() + var u OrderUpdate + if err := json.Unmarshal([]byte(o), &u); err != nil { + log.WithError(err).Error("parse error") + continue + } + + e.Orders = append(e.Orders, u) } return &e @@ -91,8 +80,14 @@ func parserOrderSnapshotEvent(v *fastjson.Value) *OrderSnapshotEvent { e.Timestamp = v.GetInt64("T") for _, ov := range v.GetArray("o") { - o := parserOrderUpdate(ov) - e.Orders = append(e.Orders, o) + var o = ov.String() + var u OrderUpdate + if err := json.Unmarshal([]byte(o), &u); err != nil { + log.WithError(err).Error("parse error") + continue + } + + e.Orders = append(e.Orders, u) } return &e @@ -108,6 +103,7 @@ type TradeUpdate struct { Fee string `json:"f"` FeeCurrency string `json:"fc"` Timestamp int64 `json:"T"` + UpdateTime int64 `json:"TU"` OrderID uint64 `json:"oi"` @@ -124,6 +120,7 @@ func parseTradeUpdate(v *fastjson.Value) TradeUpdate { Fee: string(v.GetStringBytes("f")), FeeCurrency: string(v.GetStringBytes("fc")), Timestamp: v.GetInt64("T"), + UpdateTime: v.GetInt64("TU"), OrderID: v.GetUint64("oi"), Maker: v.GetBool("m"), } @@ -168,102 +165,109 @@ func parseTradeSnapshotEvent(v *fastjson.Value) *TradeSnapshotEvent { } type BalanceMessage struct { - Currency string `json:"cu"` - Available string `json:"av"` - Locked string `json:"l"` + Currency string `json:"cu"` + Available fixedpoint.Value `json:"av"` + Locked fixedpoint.Value `json:"l"` } func (m *BalanceMessage) Balance() (*types.Balance, error) { - available, err := fixedpoint.NewFromString(m.Available) - if err != nil { - return nil, err - } - - locked, err := fixedpoint.NewFromString(m.Locked) - if err != nil { - return nil, err - } - return &types.Balance{ Currency: strings.ToUpper(m.Currency), - Locked: locked, - Available: available, + Locked: m.Locked, + Available: m.Available, }, nil } -func parseBalance(v *fastjson.Value) BalanceMessage { - return BalanceMessage{ - Currency: string(v.GetStringBytes("cu")), - Available: string(v.GetStringBytes("av")), - Locked: string(v.GetStringBytes("l")), - } +type AccountUpdateEvent struct { + BaseEvent + Balances []BalanceMessage `json:"B"` } -type AccountUpdateEvent struct { +type AccountSnapshotEvent struct { BaseEvent Balances []BalanceMessage `json:"B"` } -func parserAccountUpdateEvent(v *fastjson.Value) *AccountUpdateEvent { - 
var e AccountUpdateEvent - e.Event = string(v.GetStringBytes("e")) - e.Timestamp = v.GetInt64("T") +func parseAuthEvent(v *fastjson.Value) (*AuthEvent, error) { + var e AuthEvent + var err = json.Unmarshal([]byte(v.String()), &e) + return &e, err +} - for _, bv := range v.GetArray("B") { - e.Balances = append(e.Balances, parseBalance(bv)) - } +type ADRatio struct { + ADRatio fixedpoint.Value `json:"ad"` + AssetInUSDT fixedpoint.Value `json:"as"` + DebtInUSDT fixedpoint.Value `json:"db"` + IndexPrices []struct { + Market string `json:"M"` + Price fixedpoint.Value `json:"p"` + } `json:"idxp"` + TU types.MillisecondTimestamp `json:"TU"` +} - return &e +func (r *ADRatio) String() string { + return fmt.Sprintf("ADRatio: %v Asset: %v USDT, Debt: %v USDT (Mark Prices: %+v)", r.ADRatio, r.AssetInUSDT, r.DebtInUSDT, r.IndexPrices) } -type AccountSnapshotEvent struct { - BaseEvent - Balances []BalanceMessage `json:"B"` +type ADRatioEvent struct { + ADRatio ADRatio `json:"ad"` } -func parserAccountSnapshotEvent(v *fastjson.Value) *AccountSnapshotEvent { - var e AccountSnapshotEvent - e.Event = string(v.GetStringBytes("e")) - e.Timestamp = v.GetInt64("T") +func parseADRatioEvent(v *fastjson.Value) (*ADRatioEvent, error) { + o := v.String() + e := ADRatioEvent{} + err := json.Unmarshal([]byte(o), &e) + return &e, err +} - for _, bv := range v.GetArray("B") { - e.Balances = append(e.Balances, parseBalance(bv)) - } +type Debt struct { + Currency string `json:"cu"` + DebtPrincipal fixedpoint.Value `json:"dbp"` + DebtInterest fixedpoint.Value `json:"dbi"` + TU types.MillisecondTimestamp `json:"TU"` +} - return &e +func (d *Debt) String() string { + return fmt.Sprintf("Debt %s %v (Interest %v)", d.Currency, d.DebtPrincipal, d.DebtInterest) } -func parseAuthEvent(v *fastjson.Value) *AuthEvent { - return &AuthEvent{ - Event: string(v.GetStringBytes("e")), - ID: string(v.GetStringBytes("i")), - Timestamp: v.GetInt64("T"), - } +type DebtEvent struct { + Debts []Debt `json:"db"` +} + +func parseDebts(v *fastjson.Value) (*DebtEvent, error) { + o := v.String() + e := DebtEvent{} + err := json.Unmarshal([]byte(o), &e) + return &e, err } func ParseUserEvent(v *fastjson.Value) (interface{}, error) { eventType := string(v.GetStringBytes("e")) switch eventType { - case "order_snapshot": + case "order_snapshot", "mwallet_order_snapshot": return parserOrderSnapshotEvent(v), nil - case "order_update": + case "order_update", "mwallet_order_update": return parseOrderUpdateEvent(v), nil - case "trade_snapshot": + case "trade_snapshot", "mwallet_trade_snapshot": return parseTradeSnapshotEvent(v), nil - case "trade_update": + case "trade_update", "mwallet_trade_update": return parseTradeUpdateEvent(v), nil - case "account_snapshot": - return parserAccountSnapshotEvent(v), nil + case "ad_ratio_snapshot", "ad_ratio_update": + return parseADRatioEvent(v) - case "account_update": - return parserAccountUpdateEvent(v), nil + case "borrowing_snapshot", "borrowing_update": + return parseDebts(v) - case "authenticated": - return parseAuthEvent(v), nil + case "account_snapshot", "account_update", "mwallet_account_snapshot", "mwallet_account_update": + var e AccountUpdateEvent + o := v.String() + err := json.Unmarshal([]byte(o), &e) + return &e, err case "error": logger.Errorf("error %s", v.MarshalTo(nil)) diff --git a/pkg/exchange/max/maxapi/v3/cancel_order_request.go b/pkg/exchange/max/maxapi/v3/cancel_order_request.go new file mode 100644 index 0000000000..67bbaa52c8 --- /dev/null +++ 
b/pkg/exchange/max/maxapi/v3/cancel_order_request.go @@ -0,0 +1,19 @@ +package v3 + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +import "github.com/c9s/requestgen" + +func (s *OrderService) NewCancelOrderRequest() *CancelOrderRequest { + return &CancelOrderRequest{client: s.Client} +} + +//go:generate DeleteRequest -url "/api/v3/order" -type CancelOrderRequest -responseType .Order +type CancelOrderRequest struct { + client requestgen.AuthenticatedAPIClient + + id *uint64 `param:"id,omitempty"` + clientOrderID *string `param:"client_oid,omitempty"` +} diff --git a/pkg/exchange/max/maxapi/v3/cancel_order_request_requestgen.go b/pkg/exchange/max/maxapi/v3/cancel_order_request_requestgen.go new file mode 100644 index 0000000000..12d9c684e4 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/cancel_order_request_requestgen.go @@ -0,0 +1,164 @@ +// Code generated by "requestgen -method DELETE -url /api/v3/order -type CancelOrderRequest -responseType .Order"; DO NOT EDIT. + +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "net/url" + "reflect" + "regexp" +) + +func (c *CancelOrderRequest) Id(id uint64) *CancelOrderRequest { + c.id = &id + return c +} + +func (c *CancelOrderRequest) ClientOrderID(clientOrderID string) *CancelOrderRequest { + c.clientOrderID = &clientOrderID + return c +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (c *CancelOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (c *CancelOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check id field -> json key id + if c.id != nil { + id := *c.id + + // assign parameter of id + params["id"] = id + } else { + } + // check clientOrderID field -> json key client_oid + if c.clientOrderID != nil { + clientOrderID := *c.clientOrderID + + // assign parameter of clientOrderID + params["client_oid"] = clientOrderID + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (c *CancelOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := c.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if c.isVarSlice(_v) { + c.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (c *CancelOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := c.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (c *CancelOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (c *CancelOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + 
needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (c *CancelOrderRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (c *CancelOrderRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (c *CancelOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := c.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (c *CancelOrderRequest) Do(ctx context.Context) (*max.Order, error) { + + params, err := c.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/v3/order" + + req, err := c.client.NewAuthenticatedRequest(ctx, "DELETE", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := c.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse max.Order + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/cancel_wallet_order_all_request.go b/pkg/exchange/max/maxapi/v3/cancel_wallet_order_all_request.go new file mode 100644 index 0000000000..04825d3866 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/cancel_wallet_order_all_request.go @@ -0,0 +1,21 @@ +package v3 + +import "github.com/c9s/requestgen" + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +func (s *OrderService) NewCancelWalletOrderAllRequest(walletType WalletType) *CancelWalletOrderAllRequest { + return &CancelWalletOrderAllRequest{client: s.Client, walletType: walletType} +} + +//go:generate DeleteRequest -url "/api/v3/wallet/:walletType/orders" -type CancelWalletOrderAllRequest -responseType []Order +type CancelWalletOrderAllRequest struct { + client requestgen.AuthenticatedAPIClient + + walletType WalletType `param:"walletType,slug,required"` + side *string `param:"side"` + market *string `param:"market"` + groupID *uint32 `param:"groupID"` +} diff --git a/pkg/exchange/max/maxapi/v3/cancel_wallet_order_all_request_requestgen.go b/pkg/exchange/max/maxapi/v3/cancel_wallet_order_all_request_requestgen.go new file mode 100644 index 0000000000..0015693f3e --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/cancel_wallet_order_all_request_requestgen.go @@ -0,0 +1,199 @@ +// Code generated by "requestgen -method DELETE -url /api/v3/wallet/:walletType/orders -type CancelWalletOrderAllRequest -responseType []Order"; DO NOT EDIT. 
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "net/url" + "reflect" + "regexp" +) + +func (c *CancelWalletOrderAllRequest) Side(side string) *CancelWalletOrderAllRequest { + c.side = &side + return c +} + +func (c *CancelWalletOrderAllRequest) Market(market string) *CancelWalletOrderAllRequest { + c.market = &market + return c +} + +func (c *CancelWalletOrderAllRequest) GroupID(groupID uint32) *CancelWalletOrderAllRequest { + c.groupID = &groupID + return c +} + +func (c *CancelWalletOrderAllRequest) WalletType(walletType max.WalletType) *CancelWalletOrderAllRequest { + c.walletType = walletType + return c +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (c *CancelWalletOrderAllRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (c *CancelWalletOrderAllRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check side field -> json key side + if c.side != nil { + side := *c.side + + // assign parameter of side + params["side"] = side + } else { + } + // check market field -> json key market + if c.market != nil { + market := *c.market + + // assign parameter of market + params["market"] = market + } else { + } + // check groupID field -> json key groupID + if c.groupID != nil { + groupID := *c.groupID + + // assign parameter of groupID + params["groupID"] = groupID + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (c *CancelWalletOrderAllRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := c.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if c.isVarSlice(_v) { + c.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (c *CancelWalletOrderAllRequest) GetParametersJSON() ([]byte, error) { + params, err := c.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (c *CancelWalletOrderAllRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check walletType field -> json key walletType + walletType := c.walletType + + // TEMPLATE check-required + if len(walletType) == 0 { + return nil, fmt.Errorf("walletType is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of walletType + params["walletType"] = walletType + + return params, nil +} + +func (c *CancelWalletOrderAllRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (c *CancelWalletOrderAllRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + 
it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (c *CancelWalletOrderAllRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (c *CancelWalletOrderAllRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := c.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (c *CancelWalletOrderAllRequest) Do(ctx context.Context) ([]max.Order, error) { + + params, err := c.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/v3/wallet/:walletType/orders" + slugs, err := c.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = c.applySlugsToUrl(apiURL, slugs) + + req, err := c.client.NewAuthenticatedRequest(ctx, "DELETE", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := c.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []max.Order + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/create_wallet_order_request.go b/pkg/exchange/max/maxapi/v3/create_wallet_order_request.go new file mode 100644 index 0000000000..736c540cc5 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/create_wallet_order_request.go @@ -0,0 +1,27 @@ +package v3 + +import "github.com/c9s/requestgen" + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +//go:generate PostRequest -url "/api/v3/wallet/:walletType/orders" -type CreateWalletOrderRequest -responseType .Order +type CreateWalletOrderRequest struct { + client requestgen.AuthenticatedAPIClient + + walletType WalletType `param:"walletType,slug,required"` + market string `param:"market,required"` + side string `param:"side,required"` + volume string `param:"volume,required"` + orderType string `param:"ord_type"` + + price *string `param:"price"` + stopPrice *string `param:"stop_price"` + clientOrderID *string `param:"client_oid"` + groupID *string `param:"group_id"` +} + +func (s *OrderService) NewCreateWalletOrderRequest(walletType WalletType) *CreateWalletOrderRequest { + return &CreateWalletOrderRequest{client: s.Client, walletType: walletType} +} diff --git a/pkg/exchange/max/maxapi/v3/create_wallet_order_request_requestgen.go b/pkg/exchange/max/maxapi/v3/create_wallet_order_request_requestgen.go new file mode 100644 index 0000000000..1a59fba9af --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/create_wallet_order_request_requestgen.go @@ -0,0 +1,270 @@ +// Code generated by "requestgen -method POST -url /api/v3/wallet/:walletType/orders -type CreateWalletOrderRequest -responseType .Order"; DO NOT EDIT. 
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "net/url" + "reflect" + "regexp" +) + +func (c *CreateWalletOrderRequest) Market(market string) *CreateWalletOrderRequest { + c.market = market + return c +} + +func (c *CreateWalletOrderRequest) Side(side string) *CreateWalletOrderRequest { + c.side = side + return c +} + +func (c *CreateWalletOrderRequest) Volume(volume string) *CreateWalletOrderRequest { + c.volume = volume + return c +} + +func (c *CreateWalletOrderRequest) OrderType(orderType string) *CreateWalletOrderRequest { + c.orderType = orderType + return c +} + +func (c *CreateWalletOrderRequest) Price(price string) *CreateWalletOrderRequest { + c.price = &price + return c +} + +func (c *CreateWalletOrderRequest) StopPrice(stopPrice string) *CreateWalletOrderRequest { + c.stopPrice = &stopPrice + return c +} + +func (c *CreateWalletOrderRequest) ClientOrderID(clientOrderID string) *CreateWalletOrderRequest { + c.clientOrderID = &clientOrderID + return c +} + +func (c *CreateWalletOrderRequest) GroupID(groupID string) *CreateWalletOrderRequest { + c.groupID = &groupID + return c +} + +func (c *CreateWalletOrderRequest) WalletType(walletType max.WalletType) *CreateWalletOrderRequest { + c.walletType = walletType + return c +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (c *CreateWalletOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (c *CreateWalletOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check market field -> json key market + market := c.market + + // TEMPLATE check-required + if len(market) == 0 { + return nil, fmt.Errorf("market is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of market + params["market"] = market + // check side field -> json key side + side := c.side + + // TEMPLATE check-required + if len(side) == 0 { + return nil, fmt.Errorf("side is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of side + params["side"] = side + // check volume field -> json key volume + volume := c.volume + + // TEMPLATE check-required + if len(volume) == 0 { + return nil, fmt.Errorf("volume is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of volume + params["volume"] = volume + // check orderType field -> json key ord_type + orderType := c.orderType + + // assign parameter of orderType + params["ord_type"] = orderType + // check price field -> json key price + if c.price != nil { + price := *c.price + + // assign parameter of price + params["price"] = price + } else { + } + // check stopPrice field -> json key stop_price + if c.stopPrice != nil { + stopPrice := *c.stopPrice + + // assign parameter of stopPrice + params["stop_price"] = stopPrice + } else { + } + // check clientOrderID field -> json key client_oid + if c.clientOrderID != nil { + clientOrderID := *c.clientOrderID + + // assign parameter of clientOrderID + params["client_oid"] = clientOrderID + } else { + } + // check groupID field -> json key group_id + if c.groupID != nil { + groupID := *c.groupID + + // assign parameter of groupID + 
params["group_id"] = groupID + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (c *CreateWalletOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := c.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if c.isVarSlice(_v) { + c.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (c *CreateWalletOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := c.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (c *CreateWalletOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check walletType field -> json key walletType + walletType := c.walletType + + // TEMPLATE check-required + if len(walletType) == 0 { + return nil, fmt.Errorf("walletType is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of walletType + params["walletType"] = walletType + + return params, nil +} + +func (c *CreateWalletOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (c *CreateWalletOrderRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (c *CreateWalletOrderRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (c *CreateWalletOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := c.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (c *CreateWalletOrderRequest) Do(ctx context.Context) (*max.Order, error) { + + params, err := c.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/v3/wallet/:walletType/orders" + slugs, err := c.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = c.applySlugsToUrl(apiURL, slugs) + + req, err := c.client.NewAuthenticatedRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := c.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse max.Order + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_margin_ad_ratio_request.go b/pkg/exchange/max/maxapi/v3/get_margin_ad_ratio_request.go new file mode 100644 index 0000000000..f01cc7c2bb --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_margin_ad_ratio_request.go @@ -0,0 +1,26 @@ +package v3 + +import ( + "github.com/c9s/requestgen" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command 
PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +func (s *MarginService) NewGetMarginADRatioRequest() *GetMarginADRatioRequest { + return &GetMarginADRatioRequest{client: s.Client} +} + +type ADRatio struct { + AdRatio fixedpoint.Value `json:"ad_ratio"` + AssetInUsdt fixedpoint.Value `json:"asset_in_usdt"` + DebtInUsdt fixedpoint.Value `json:"debt_in_usdt"` +} + +//go:generate GetRequest -url "/api/v3/wallet/m/ad_ratio" -type GetMarginADRatioRequest -responseType .ADRatio +type GetMarginADRatioRequest struct { + client requestgen.AuthenticatedAPIClient +} diff --git a/pkg/exchange/max/maxapi/v3/get_margin_ad_ratio_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_margin_ad_ratio_request_requestgen.go new file mode 100644 index 0000000000..cf54325a93 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_margin_ad_ratio_request_requestgen.go @@ -0,0 +1,135 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/m/ad_ratio -type GetMarginADRatioRequest -responseType .ADRatio"; DO NOT EDIT. + +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginADRatioRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginADRatioRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginADRatioRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginADRatioRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginADRatioRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginADRatioRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginADRatioRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginADRatioRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginADRatioRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + 
params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginADRatioRequest) Do(ctx context.Context) (*ADRatio, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v3/wallet/m/ad_ratio" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse ADRatio + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_margin_borrowing_limits_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_margin_borrowing_limits_request_requestgen.go new file mode 100644 index 0000000000..4c631eab9e --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_margin_borrowing_limits_request_requestgen.go @@ -0,0 +1,135 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/m/limits -type GetMarginBorrowingLimitsRequest -responseType .MarginBorrowingLimitMap"; DO NOT EDIT. + +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginBorrowingLimitsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginBorrowingLimitsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginBorrowingLimitsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginBorrowingLimitsRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginBorrowingLimitsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginBorrowingLimitsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginBorrowingLimitsRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginBorrowingLimitsRequest) isVarSlice(_v interface{}) bool { + rt 
:= reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginBorrowingLimitsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginBorrowingLimitsRequest) Do(ctx context.Context) (*MarginBorrowingLimitMap, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v3/wallet/m/limits" + + req, err := g.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse MarginBorrowingLimitMap + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_margin_interest_history_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_margin_interest_history_request_requestgen.go new file mode 100644 index 0000000000..60002d36bc --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_margin_interest_history_request_requestgen.go @@ -0,0 +1,203 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/m/interests/history/:currency -type GetMarginInterestHistoryRequest -responseType []MarginInterestRecord"; DO NOT EDIT. + +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginInterestHistoryRequest) StartTime(startTime time.Time) *GetMarginInterestHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginInterestHistoryRequest) EndTime(endTime time.Time) *GetMarginInterestHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetMarginInterestHistoryRequest) Limit(limit int) *GetMarginInterestHistoryRequest { + g.limit = &limit + return g +} + +func (g *GetMarginInterestHistoryRequest) Currency(currency string) *GetMarginInterestHistoryRequest { + g.currency = currency + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginInterestHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginInterestHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters 
from GetParameters into the url.Values format +func (g *GetMarginInterestHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginInterestHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginInterestHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + currency := g.currency + + // TEMPLATE check-required + if len(currency) == 0 { + return nil, fmt.Errorf("currency is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of currency + params["currency"] = currency + + return params, nil +} + +func (g *GetMarginInterestHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginInterestHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginInterestHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginInterestHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginInterestHistoryRequest) Do(ctx context.Context) ([]MarginInterestRecord, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v3/wallet/m/interests/history/:currency" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []MarginInterestRecord + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_margin_interest_rates_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_margin_interest_rates_request_requestgen.go new file mode 100644 index 0000000000..6de0e5eaa0 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_margin_interest_rates_request_requestgen.go @@ -0,0 +1,135 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/m/interest_rates -type GetMarginInterestRatesRequest -responseType 
.MarginInterestRateMap"; DO NOT EDIT. + +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginInterestRatesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginInterestRatesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginInterestRatesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginInterestRatesRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginInterestRatesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginInterestRatesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginInterestRatesRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginInterestRatesRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginInterestRatesRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginInterestRatesRequest) Do(ctx context.Context) (*MarginInterestRateMap, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v3/wallet/m/interest_rates" + + req, err := g.client.NewRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse MarginInterestRateMap + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_margin_liquidation_history_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_margin_liquidation_history_request_requestgen.go new file mode 100644 index 0000000000..257b8c8e7e --- 
/dev/null +++ b/pkg/exchange/max/maxapi/v3/get_margin_liquidation_history_request_requestgen.go @@ -0,0 +1,181 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/m/liquidations -type GetMarginLiquidationHistoryRequest -responseType []LiquidationRecord"; DO NOT EDIT. + +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginLiquidationHistoryRequest) StartTime(startTime time.Time) *GetMarginLiquidationHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginLiquidationHistoryRequest) EndTime(endTime time.Time) *GetMarginLiquidationHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetMarginLiquidationHistoryRequest) Limit(limit int) *GetMarginLiquidationHistoryRequest { + g.limit = &limit + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginLiquidationHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginLiquidationHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginLiquidationHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginLiquidationHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginLiquidationHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetMarginLiquidationHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginLiquidationHistoryRequest) 
iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginLiquidationHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginLiquidationHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginLiquidationHistoryRequest) Do(ctx context.Context) ([]LiquidationRecord, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v3/wallet/m/liquidations" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []LiquidationRecord + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_margin_loan_history_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_margin_loan_history_request_requestgen.go new file mode 100644 index 0000000000..e0ca63db00 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_margin_loan_history_request_requestgen.go @@ -0,0 +1,203 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/m/loans/:currency -type GetMarginLoanHistoryRequest -responseType []LoanRecord"; DO NOT EDIT. 
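A minimal usage sketch for the margin liquidation history request generated above, assuming context and time are imported along with the v3 package ("github.com/c9s/bbgo/pkg/exchange/max/maxapi/v3") and that svc is an already-constructed *v3.MarginService; the 24-hour window and the limit are placeholder values.

// Illustrative only: the time window and limit are placeholders.
func fetchRecentLiquidations(ctx context.Context, svc *v3.MarginService) ([]v3.LiquidationRecord, error) {
	now := time.Now()
	return svc.NewGetMarginLiquidationHistoryRequest().
		StartTime(now.Add(-24 * time.Hour)).
		EndTime(now).
		Limit(100).
		Do(ctx)
}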
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginLoanHistoryRequest) StartTime(startTime time.Time) *GetMarginLoanHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginLoanHistoryRequest) EndTime(endTime time.Time) *GetMarginLoanHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetMarginLoanHistoryRequest) Limit(limit int) *GetMarginLoanHistoryRequest { + g.limit = &limit + return g +} + +func (g *GetMarginLoanHistoryRequest) Currency(currency string) *GetMarginLoanHistoryRequest { + g.currency = currency + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginLoanHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginLoanHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginLoanHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginLoanHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginLoanHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + currency := g.currency + + // TEMPLATE check-required + if len(currency) == 0 { + return nil, fmt.Errorf("currency is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of currency + params["currency"] = currency + + return params, nil +} + +func (g *GetMarginLoanHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + 
} + + return url +} + +func (g *GetMarginLoanHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginLoanHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginLoanHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginLoanHistoryRequest) Do(ctx context.Context) ([]LoanRecord, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v3/wallet/m/loans/:currency" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []LoanRecord + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_margin_repayment_history_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_margin_repayment_history_request_requestgen.go new file mode 100644 index 0000000000..83edcd7fdb --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_margin_repayment_history_request_requestgen.go @@ -0,0 +1,203 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/m/repayments/:currency -type GetMarginRepaymentHistoryRequest -responseType []RepaymentRecord"; DO NOT EDIT. 
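A hedged sketch for the margin loan history request above: the constructor name NewGetMarginLoanHistoryRequest and its currency argument are assumed by analogy with NewGetMarginInterestHistoryRequest, and "btc" plus the 7-day window are placeholder values.

// Assumption: NewGetMarginLoanHistoryRequest(currency) exists on MarginService.
func fetchLoanHistory(ctx context.Context, svc *v3.MarginService) ([]v3.LoanRecord, error) {
	return svc.NewGetMarginLoanHistoryRequest("btc").
		StartTime(time.Now().AddDate(0, 0, -7)).
		Limit(50).
		Do(ctx)
}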
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetMarginRepaymentHistoryRequest) StartTime(startTime time.Time) *GetMarginRepaymentHistoryRequest { + g.startTime = &startTime + return g +} + +func (g *GetMarginRepaymentHistoryRequest) EndTime(endTime time.Time) *GetMarginRepaymentHistoryRequest { + g.endTime = &endTime + return g +} + +func (g *GetMarginRepaymentHistoryRequest) Limit(limit int) *GetMarginRepaymentHistoryRequest { + g.limit = &limit + return g +} + +func (g *GetMarginRepaymentHistoryRequest) Currency(currency string) *GetMarginRepaymentHistoryRequest { + g.currency = currency + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetMarginRepaymentHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetMarginRepaymentHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check startTime field -> json key startTime + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["startTime"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key endTime + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["endTime"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetMarginRepaymentHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetMarginRepaymentHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetMarginRepaymentHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + currency := g.currency + + // TEMPLATE check-required + if len(currency) == 0 { + return nil, fmt.Errorf("currency is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of currency + params["currency"] = currency + + return params, nil +} + +func (g *GetMarginRepaymentHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := 
regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetMarginRepaymentHistoryRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetMarginRepaymentHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetMarginRepaymentHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetMarginRepaymentHistoryRequest) Do(ctx context.Context) ([]RepaymentRecord, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v3/wallet/m/repayments/:currency" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []RepaymentRecord + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_order_request.go b/pkg/exchange/max/maxapi/v3/get_order_request.go new file mode 100644 index 0000000000..94de554210 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_order_request.go @@ -0,0 +1,19 @@ +package v3 + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +import "github.com/c9s/requestgen" + +func (s *OrderService) NewGetOrderRequest() *GetOrderRequest { + return &GetOrderRequest{client: s.Client} +} + +//go:generate GetRequest -url "/api/v3/order" -type GetOrderRequest -responseType .Order +type GetOrderRequest struct { + client requestgen.AuthenticatedAPIClient + + id *uint64 `param:"id,omitempty"` + clientOrderID *string `param:"client_oid,omitempty"` +} diff --git a/pkg/exchange/max/maxapi/v3/get_order_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_order_request_requestgen.go new file mode 100644 index 0000000000..ae8c39e019 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_order_request_requestgen.go @@ -0,0 +1,165 @@ +// Code generated by "requestgen -method GET -url /api/v3/order -type GetOrderRequest -responseType .Order"; DO NOT EDIT. 
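A minimal usage sketch for GetOrderRequest defined above, assuming svc is an already-constructed *v3.OrderService and the maxapi package is imported under its package name max; the order id and client order id values are placeholders.

// Query one order by exchange order id, or alternatively by client_oid.
func fetchOrder(ctx context.Context, svc *v3.OrderService) (*max.Order, error) {
	return svc.NewGetOrderRequest().Id(1234567).Do(ctx)
	// by client order id instead:
	// return svc.NewGetOrderRequest().ClientOrderID("my-client-oid").Do(ctx)
}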
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "net/url" + "reflect" + "regexp" +) + +func (g *GetOrderRequest) Id(id uint64) *GetOrderRequest { + g.id = &id + return g +} + +func (g *GetOrderRequest) ClientOrderID(clientOrderID string) *GetOrderRequest { + g.clientOrderID = &clientOrderID + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetOrderRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check id field -> json key id + if g.id != nil { + id := *g.id + + // assign parameter of id + params["id"] = id + } else { + } + // check clientOrderID field -> json key client_oid + if g.clientOrderID != nil { + clientOrderID := *g.clientOrderID + + // assign parameter of clientOrderID + params["client_oid"] = clientOrderID + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetOrderRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (g *GetOrderRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetOrderRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetOrderRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetOrderRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetOrderRequest) Do(ctx context.Context) (*max.Order, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v3/order" + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + 
return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse max.Order + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_wallet_accounts_request.go b/pkg/exchange/max/maxapi/v3/get_wallet_accounts_request.go new file mode 100644 index 0000000000..20b2ebdd8c --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_wallet_accounts_request.go @@ -0,0 +1,18 @@ +package v3 + +import "github.com/c9s/requestgen" + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +func (s *OrderService) NewGetWalletAccountsRequest(walletType WalletType) *GetWalletAccountsRequest { + return &GetWalletAccountsRequest{client: s.Client, walletType: walletType} +} + +//go:generate GetRequest -url "/api/v3/wallet/:walletType/accounts" -type GetWalletAccountsRequest -responseType []Account +type GetWalletAccountsRequest struct { + client requestgen.AuthenticatedAPIClient + + walletType WalletType `param:"walletType,slug,required"` +} diff --git a/pkg/exchange/max/maxapi/v3/get_wallet_accounts_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_wallet_accounts_request_requestgen.go new file mode 100644 index 0000000000..7c5c1ff04a --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_wallet_accounts_request_requestgen.go @@ -0,0 +1,158 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/:walletType/accounts -type GetWalletAccountsRequest -responseType []Account"; DO NOT EDIT. + +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "net/url" + "reflect" + "regexp" +) + +func (g *GetWalletAccountsRequest) WalletType(walletType max.WalletType) *GetWalletAccountsRequest { + g.walletType = walletType + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetWalletAccountsRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetWalletAccountsRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetWalletAccountsRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetWalletAccountsRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetWalletAccountsRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = 
map[string]interface{}{} + // check walletType field -> json key walletType + walletType := g.walletType + + // TEMPLATE check-required + if len(walletType) == 0 { + return nil, fmt.Errorf("walletType is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of walletType + params["walletType"] = walletType + + return params, nil +} + +func (g *GetWalletAccountsRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetWalletAccountsRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetWalletAccountsRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetWalletAccountsRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetWalletAccountsRequest) Do(ctx context.Context) ([]max.Account, error) { + + // no body params + var params interface{} + query := url.Values{} + + apiURL := "/api/v3/wallet/:walletType/accounts" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []max.Account + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_wallet_open_orders_request.go b/pkg/exchange/max/maxapi/v3/get_wallet_open_orders_request.go new file mode 100644 index 0000000000..3841becabc --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_wallet_open_orders_request.go @@ -0,0 +1,19 @@ +package v3 + +import "github.com/c9s/requestgen" + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +func (s *OrderService) NewGetWalletOpenOrdersRequest(walletType WalletType) *GetWalletOpenOrdersRequest { + return &GetWalletOpenOrdersRequest{client: s.Client, walletType: walletType} +} + +//go:generate GetRequest -url "/api/v3/wallet/:walletType/orders/open" -type GetWalletOpenOrdersRequest -responseType []Order +type GetWalletOpenOrdersRequest struct { + client requestgen.AuthenticatedAPIClient + + walletType WalletType `param:"walletType,slug,required"` + market string `param:"market,required"` +} diff --git a/pkg/exchange/max/maxapi/v3/get_wallet_open_orders_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_wallet_open_orders_request_requestgen.go new file mode 100644 index 0000000000..8121085b25 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_wallet_open_orders_request_requestgen.go @@ -0,0 +1,177 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/:walletType/orders/open -type GetWalletOpenOrdersRequest -responseType []Order"; DO NOT EDIT. 
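A minimal usage sketch for GetWalletAccountsRequest above; walletType is taken as a parameter because the WalletType constant names are not part of this diff, and svc is an already-constructed *v3.OrderService.

// List the balances of one wallet (spot or margin, depending on walletType).
func fetchWalletAccounts(ctx context.Context, svc *v3.OrderService, walletType v3.WalletType) ([]max.Account, error) {
	return svc.NewGetWalletAccountsRequest(walletType).Do(ctx)
}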
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "net/url" + "reflect" + "regexp" +) + +func (g *GetWalletOpenOrdersRequest) Market(market string) *GetWalletOpenOrdersRequest { + g.market = market + return g +} + +func (g *GetWalletOpenOrdersRequest) WalletType(walletType max.WalletType) *GetWalletOpenOrdersRequest { + g.walletType = walletType + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetWalletOpenOrdersRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetWalletOpenOrdersRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check market field -> json key market + market := g.market + + // TEMPLATE check-required + if len(market) == 0 { + return nil, fmt.Errorf("market is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of market + params["market"] = market + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetWalletOpenOrdersRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetWalletOpenOrdersRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetWalletOpenOrdersRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check walletType field -> json key walletType + walletType := g.walletType + + // TEMPLATE check-required + if len(walletType) == 0 { + return nil, fmt.Errorf("walletType is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of walletType + params["walletType"] = walletType + + return params, nil +} + +func (g *GetWalletOpenOrdersRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetWalletOpenOrdersRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetWalletOpenOrdersRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetWalletOpenOrdersRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + 
slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetWalletOpenOrdersRequest) Do(ctx context.Context) ([]max.Order, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v3/wallet/:walletType/orders/open" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []max.Order + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_wallet_order_history_request.go b/pkg/exchange/max/maxapi/v3/get_wallet_order_history_request.go new file mode 100644 index 0000000000..cf6f804575 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_wallet_order_history_request.go @@ -0,0 +1,22 @@ +package v3 + +import "github.com/c9s/requestgen" + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +func (s *OrderService) NewGetWalletOrderHistoryRequest(walletType WalletType) *GetWalletOrderHistoryRequest { + return &GetWalletOrderHistoryRequest{client: s.Client, walletType: walletType} +} + +//go:generate GetRequest -url "/api/v3/wallet/:walletType/orders/history" -type GetWalletOrderHistoryRequest -responseType []Order +type GetWalletOrderHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + walletType WalletType `param:"walletType,slug,required"` + + market string `param:"market,required"` + fromID *uint64 `param:"from_id"` + limit *uint `param:"limit"` +} diff --git a/pkg/exchange/max/maxapi/v3/get_wallet_order_history_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_wallet_order_history_request_requestgen.go new file mode 100644 index 0000000000..c6b7393dc5 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_wallet_order_history_request_requestgen.go @@ -0,0 +1,203 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/:walletType/orders/history -type GetWalletOrderHistoryRequest -responseType []Order"; DO NOT EDIT. 
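A minimal usage sketch for GetWalletOpenOrdersRequest above; the market id "btcusdt" is a placeholder and walletType is again passed in rather than hard-coded.

// List the open orders of one market in the given wallet.
func fetchOpenOrders(ctx context.Context, svc *v3.OrderService, walletType v3.WalletType) ([]max.Order, error) {
	return svc.NewGetWalletOpenOrdersRequest(walletType).Market("btcusdt").Do(ctx)
}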
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "net/url" + "reflect" + "regexp" +) + +func (g *GetWalletOrderHistoryRequest) Market(market string) *GetWalletOrderHistoryRequest { + g.market = market + return g +} + +func (g *GetWalletOrderHistoryRequest) FromID(fromID uint64) *GetWalletOrderHistoryRequest { + g.fromID = &fromID + return g +} + +func (g *GetWalletOrderHistoryRequest) Limit(limit uint) *GetWalletOrderHistoryRequest { + g.limit = &limit + return g +} + +func (g *GetWalletOrderHistoryRequest) WalletType(walletType max.WalletType) *GetWalletOrderHistoryRequest { + g.walletType = walletType + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetWalletOrderHistoryRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetWalletOrderHistoryRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check market field -> json key market + market := g.market + + // TEMPLATE check-required + if len(market) == 0 { + return nil, fmt.Errorf("market is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of market + params["market"] = market + // check fromID field -> json key from_id + if g.fromID != nil { + fromID := *g.fromID + + // assign parameter of fromID + params["from_id"] = fromID + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetWalletOrderHistoryRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetWalletOrderHistoryRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g *GetWalletOrderHistoryRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check walletType field -> json key walletType + walletType := g.walletType + + // TEMPLATE check-required + if len(walletType) == 0 { + return nil, fmt.Errorf("walletType is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of walletType + params["walletType"] = walletType + + return params, nil +} + +func (g *GetWalletOrderHistoryRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetWalletOrderHistoryRequest) 
iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetWalletOrderHistoryRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetWalletOrderHistoryRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetWalletOrderHistoryRequest) Do(ctx context.Context) ([]max.Order, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v3/wallet/:walletType/orders/history" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []max.Order + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/get_wallet_trades_request.go b/pkg/exchange/max/maxapi/v3/get_wallet_trades_request.go new file mode 100644 index 0000000000..e4804a7c14 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_wallet_trades_request.go @@ -0,0 +1,28 @@ +package v3 + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +import ( + "time" + + "github.com/c9s/requestgen" +) + +func (s *OrderService) NewGetWalletTradesRequest(walletType WalletType) *GetWalletTradesRequest { + return &GetWalletTradesRequest{client: s.Client, walletType: walletType} +} + +//go:generate GetRequest -url "/api/v3/wallet/:walletType/trades" -type GetWalletTradesRequest -responseType []Trade +type GetWalletTradesRequest struct { + client requestgen.AuthenticatedAPIClient + + walletType WalletType `param:"walletType,slug,required"` + + market string `param:"market,required"` + from *uint64 `param:"from_id"` + startTime *time.Time `param:"start_time,milliseconds"` + endTime *time.Time `param:"end_time,milliseconds"` + limit *uint64 `param:"limit"` +} diff --git a/pkg/exchange/max/maxapi/v3/get_wallet_trades_request_requestgen.go b/pkg/exchange/max/maxapi/v3/get_wallet_trades_request_requestgen.go new file mode 100644 index 0000000000..2fdf94c400 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/get_wallet_trades_request_requestgen.go @@ -0,0 +1,233 @@ +// Code generated by "requestgen -method GET -url /api/v3/wallet/:walletType/trades -type GetWalletTradesRequest -responseType []Trade"; DO NOT EDIT. 
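A minimal usage sketch for GetWalletTradesRequest defined above; the market id, from_id cursor, and limit are placeholder values.

// Page private trades forward from a known trade id.
func fetchWalletTrades(ctx context.Context, svc *v3.OrderService, walletType v3.WalletType) ([]max.Trade, error) {
	return svc.NewGetWalletTradesRequest(walletType).
		Market("btcusdt").
		From(1000).
		Limit(500).
		Do(ctx)
}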
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "net/url" + "reflect" + "regexp" + "strconv" + "time" +) + +func (g *GetWalletTradesRequest) Market(market string) *GetWalletTradesRequest { + g.market = market + return g +} + +func (g *GetWalletTradesRequest) From(from uint64) *GetWalletTradesRequest { + g.from = &from + return g +} + +func (g *GetWalletTradesRequest) StartTime(startTime time.Time) *GetWalletTradesRequest { + g.startTime = &startTime + return g +} + +func (g *GetWalletTradesRequest) EndTime(endTime time.Time) *GetWalletTradesRequest { + g.endTime = &endTime + return g +} + +func (g *GetWalletTradesRequest) Limit(limit uint64) *GetWalletTradesRequest { + g.limit = &limit + return g +} + +func (g *GetWalletTradesRequest) WalletType(walletType max.WalletType) *GetWalletTradesRequest { + g.walletType = walletType + return g +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (g *GetWalletTradesRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (g *GetWalletTradesRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check market field -> json key market + market := g.market + + // TEMPLATE check-required + if len(market) == 0 { + return nil, fmt.Errorf("market is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of market + params["market"] = market + // check from field -> json key from_id + if g.from != nil { + from := *g.from + + // assign parameter of from + params["from_id"] = from + } else { + } + // check startTime field -> json key start_time + if g.startTime != nil { + startTime := *g.startTime + + // assign parameter of startTime + // convert time.Time to milliseconds time stamp + params["start_time"] = strconv.FormatInt(startTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check endTime field -> json key end_time + if g.endTime != nil { + endTime := *g.endTime + + // assign parameter of endTime + // convert time.Time to milliseconds time stamp + params["end_time"] = strconv.FormatInt(endTime.UnixNano()/int64(time.Millisecond), 10) + } else { + } + // check limit field -> json key limit + if g.limit != nil { + limit := *g.limit + + // assign parameter of limit + params["limit"] = limit + } else { + } + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (g *GetWalletTradesRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := g.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if g.isVarSlice(_v) { + g.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (g *GetWalletTradesRequest) GetParametersJSON() ([]byte, error) { + params, err := g.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (g 
*GetWalletTradesRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check walletType field -> json key walletType + walletType := g.walletType + + // TEMPLATE check-required + if len(walletType) == 0 { + return nil, fmt.Errorf("walletType is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of walletType + params["walletType"] = walletType + + return params, nil +} + +func (g *GetWalletTradesRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (g *GetWalletTradesRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (g *GetWalletTradesRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (g *GetWalletTradesRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := g.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (g *GetWalletTradesRequest) Do(ctx context.Context) ([]max.Trade, error) { + + // empty params for GET operation + var params interface{} + query, err := g.GetParametersQuery() + if err != nil { + return nil, err + } + + apiURL := "/api/v3/wallet/:walletType/trades" + slugs, err := g.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = g.applySlugsToUrl(apiURL, slugs) + + req, err := g.client.NewAuthenticatedRequest(ctx, "GET", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := g.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse []max.Trade + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/margin.go b/pkg/exchange/max/maxapi/v3/margin.go new file mode 100644 index 0000000000..e69422a268 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/margin.go @@ -0,0 +1,160 @@ +package v3 + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +import ( + "time" + + "github.com/c9s/requestgen" + + maxapi "github.com/c9s/bbgo/pkg/exchange/max/maxapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type MarginService struct { + Client *maxapi.RestClient +} + +func (s *MarginService) NewGetMarginInterestRatesRequest() *GetMarginInterestRatesRequest { + return &GetMarginInterestRatesRequest{client: s.Client} +} + +func (s *MarginService) NewGetMarginBorrowingLimitsRequest() *GetMarginBorrowingLimitsRequest { + return &GetMarginBorrowingLimitsRequest{client: s.Client} +} + +func (s *MarginService) NewGetMarginInterestHistoryRequest(currency string) *GetMarginInterestHistoryRequest { + return &GetMarginInterestHistoryRequest{client: s.Client, currency: currency} +} + +func (s *MarginService) NewGetMarginLiquidationHistoryRequest() *GetMarginLiquidationHistoryRequest { + return &GetMarginLiquidationHistoryRequest{client: s.Client} +} + +func (s *MarginService) 
NewGetMarginLoanHistoryRequest() *GetMarginLoanHistoryRequest { + return &GetMarginLoanHistoryRequest{client: s.Client} +} + +func (s *MarginService) NewMarginRepayRequest() *MarginRepayRequest { + return &MarginRepayRequest{client: s.Client} +} + +func (s *MarginService) NewMarginLoanRequest() *MarginLoanRequest { + return &MarginLoanRequest{client: s.Client} +} + +type MarginInterestRate struct { + HourlyInterestRate fixedpoint.Value `json:"hourly_interest_rate"` + NextHourlyInterestRate fixedpoint.Value `json:"next_hourly_interest_rate"` +} + +type MarginInterestRateMap map[string]MarginInterestRate + +//go:generate GetRequest -url "/api/v3/wallet/m/interest_rates" -type GetMarginInterestRatesRequest -responseType .MarginInterestRateMap +type GetMarginInterestRatesRequest struct { + client requestgen.APIClient +} + +type MarginBorrowingLimitMap map[string]fixedpoint.Value + +//go:generate GetRequest -url "/api/v3/wallet/m/limits" -type GetMarginBorrowingLimitsRequest -responseType .MarginBorrowingLimitMap +type GetMarginBorrowingLimitsRequest struct { + client requestgen.APIClient +} + +type MarginInterestRecord struct { + Currency string `json:"currency"` + Amount fixedpoint.Value `json:"amount"` + InterestRate fixedpoint.Value `json:"interest_rate"` + CreatedAt types.MillisecondTimestamp `json:"created_at"` +} + +//go:generate GetRequest -url "/api/v3/wallet/m/interests/history/:currency" -type GetMarginInterestHistoryRequest -responseType []MarginInterestRecord +type GetMarginInterestHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + + currency string `param:"currency,slug,required"` + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + limit *int `param:"limit"` +} + +type LiquidationRecord struct { + SN string `json:"sn"` + AdRatio fixedpoint.Value `json:"ad_ratio"` + ExpectedAdRatio fixedpoint.Value `json:"expected_ad_ratio"` + CreatedAt types.MillisecondTimestamp `json:"created_at"` + State LiquidationState `json:"state"` +} + +type LiquidationState string + +const ( + LiquidationStateProcessing LiquidationState = "processing" + LiquidationStateDebt LiquidationState = "debt" + LiquidationStateLiquidated LiquidationState = "liquidated" +) + +//go:generate GetRequest -url "/api/v3/wallet/m/liquidations" -type GetMarginLiquidationHistoryRequest -responseType []LiquidationRecord +type GetMarginLiquidationHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + limit *int `param:"limit"` +} + +type RepaymentRecord struct { + SN string `json:"sn"` + Currency string `json:"currency"` + Amount fixedpoint.Value `json:"amount"` + Principal fixedpoint.Value `json:"principal"` + Interest fixedpoint.Value `json:"interest"` + CreatedAt types.MillisecondTimestamp `json:"created_at"` + State string `json:"state"` +} + +//go:generate GetRequest -url "/api/v3/wallet/m/repayments/:currency" -type GetMarginRepaymentHistoryRequest -responseType []RepaymentRecord +type GetMarginRepaymentHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + currency string `param:"currency,slug,required"` + + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + limit *int `param:"limit"` +} + +type LoanRecord struct { + SN string `json:"sn"` + Currency string `json:"currency"` + Amount fixedpoint.Value `json:"amount"` + State string `json:"state"` + CreatedAt 
types.MillisecondTimestamp `json:"created_at"` + InterestRate fixedpoint.Value `json:"interest_rate"` +} + +//go:generate GetRequest -url "/api/v3/wallet/m/loans/:currency" -type GetMarginLoanHistoryRequest -responseType []LoanRecord +type GetMarginLoanHistoryRequest struct { + client requestgen.AuthenticatedAPIClient + currency string `param:"currency,slug,required"` + + startTime *time.Time `param:"startTime,milliseconds"` + endTime *time.Time `param:"endTime,milliseconds"` + limit *int `param:"limit"` +} + +//go:generate PostRequest -url "/api/v3/wallet/m/loan/:currency" -type MarginLoanRequest -responseType .LoanRecord +type MarginLoanRequest struct { + client requestgen.AuthenticatedAPIClient + currency string `param:"currency,slug,required"` + amount string `param:"amount"` +} + +//go:generate PostRequest -url "/api/v3/wallet/m/repayment/:currency" -type MarginRepayRequest -responseType .RepaymentRecord +type MarginRepayRequest struct { + client requestgen.AuthenticatedAPIClient + currency string `param:"currency,slug,required"` + amount string `param:"amount"` +} diff --git a/pkg/exchange/max/maxapi/v3/margin_loan_request_requestgen.go b/pkg/exchange/max/maxapi/v3/margin_loan_request_requestgen.go new file mode 100644 index 0000000000..8f3e73466c --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/margin_loan_request_requestgen.go @@ -0,0 +1,169 @@ +// Code generated by "requestgen -method POST -url /api/v3/wallet/m/loan/:currency -type MarginLoanRequest -responseType .LoanRecord"; DO NOT EDIT. + +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (m *MarginLoanRequest) Amount(amount string) *MarginLoanRequest { + m.amount = amount + return m +} + +func (m *MarginLoanRequest) Currency(currency string) *MarginLoanRequest { + m.currency = currency + return m +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (m *MarginLoanRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (m *MarginLoanRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check amount field -> json key amount + amount := m.amount + + // assign parameter of amount + params["amount"] = amount + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (m *MarginLoanRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := m.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if m.isVarSlice(_v) { + m.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (m *MarginLoanRequest) GetParametersJSON() ([]byte, error) { + params, err := m.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (m *MarginLoanRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> 
json key currency + currency := m.currency + + // TEMPLATE check-required + if len(currency) == 0 { + return nil, fmt.Errorf("currency is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of currency + params["currency"] = currency + + return params, nil +} + +func (m *MarginLoanRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (m *MarginLoanRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (m *MarginLoanRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (m *MarginLoanRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := m.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (m *MarginLoanRequest) Do(ctx context.Context) (*LoanRecord, error) { + + params, err := m.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/v3/wallet/m/loan/:currency" + slugs, err := m.GetSlugsMap() + if err != nil { + return nil, err + } + + apiURL = m.applySlugsToUrl(apiURL, slugs) + + req, err := m.client.NewAuthenticatedRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := m.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse LoanRecord + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/margin_repay_request_requestgen.go b/pkg/exchange/max/maxapi/v3/margin_repay_request_requestgen.go new file mode 100644 index 0000000000..a84beb4756 --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/margin_repay_request_requestgen.go @@ -0,0 +1,169 @@ +// Code generated by "requestgen -method POST -url /api/v3/wallet/m/repayment/:currency -type MarginRepayRequest -responseType .RepaymentRecord"; DO NOT EDIT. 
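For reference, a hedged usage sketch of the margin wallet flow defined in margin.go above: borrow with MarginLoanRequest, then repay with MarginRepayRequest. This is not part of the patch; the client construction and the "usdt"/amount literals are illustrative assumptions.

package main

import (
	"context"
	"fmt"
	"log"

	maxapi "github.com/c9s/bbgo/pkg/exchange/max/maxapi"
	v3 "github.com/c9s/bbgo/pkg/exchange/max/maxapi/v3"
)

func main() {
	// assumed client setup; credentials are placeholders
	client := maxapi.NewRestClient(maxapi.ProductionAPIURL)
	client.Auth("YOUR_API_KEY", "YOUR_API_SECRET")

	margin := &v3.MarginService{Client: client}
	ctx := context.Background()

	// borrow 100 USDT against the margin wallet
	loan, err := margin.NewMarginLoanRequest().
		Currency("usdt").
		Amount("100").
		Do(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("loan sn=%s state=%s\n", loan.SN, loan.State)

	// repay the same amount
	repayment, err := margin.NewMarginRepayRequest().
		Currency("usdt").
		Amount("100").
		Do(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("repayment sn=%s state=%s\n", repayment.SN, repayment.State)
}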
+ +package v3 + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (m *MarginRepayRequest) Amount(amount string) *MarginRepayRequest { + m.amount = amount + return m +} + +func (m *MarginRepayRequest) Currency(currency string) *MarginRepayRequest { + m.currency = currency + return m +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (m *MarginRepayRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for _k, _v := range params { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (m *MarginRepayRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check amount field -> json key amount + amount := m.amount + + // assign parameter of amount + params["amount"] = amount + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (m *MarginRepayRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := m.GetParameters() + if err != nil { + return query, err + } + + for _k, _v := range params { + if m.isVarSlice(_v) { + m.iterateSlice(_v, func(it interface{}) { + query.Add(_k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(_k, fmt.Sprintf("%v", _v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (m *MarginRepayRequest) GetParametersJSON() ([]byte, error) { + params, err := m.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (m *MarginRepayRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check currency field -> json key currency + currency := m.currency + + // TEMPLATE check-required + if len(currency) == 0 { + return nil, fmt.Errorf("currency is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of currency + params["currency"] = currency + + return params, nil +} + +func (m *MarginRepayRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for _k, _v := range slugs { + needleRE := regexp.MustCompile(":" + _k + "\\b") + url = needleRE.ReplaceAllString(url, _v) + } + + return url +} + +func (m *MarginRepayRequest) iterateSlice(slice interface{}, _f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for _i := 0; _i < sliceValue.Len(); _i++ { + it := sliceValue.Index(_i).Interface() + _f(it) + } +} + +func (m *MarginRepayRequest) isVarSlice(_v interface{}) bool { + rt := reflect.TypeOf(_v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (m *MarginRepayRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := m.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for _k, _v := range params { + slugs[_k] = fmt.Sprintf("%v", _v) + } + + return slugs, nil +} + +func (m *MarginRepayRequest) Do(ctx context.Context) (*RepaymentRecord, error) { + + params, err := m.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "/api/v3/wallet/m/repayment/:currency" + slugs, err := m.GetSlugsMap() + if err != 
nil { + return nil, err + } + + apiURL = m.applySlugsToUrl(apiURL, slugs) + + req, err := m.client.NewAuthenticatedRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := m.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse RepaymentRecord + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/maxapi/v3/order.go b/pkg/exchange/max/maxapi/v3/order.go new file mode 100644 index 0000000000..412c634d1a --- /dev/null +++ b/pkg/exchange/max/maxapi/v3/order.go @@ -0,0 +1,20 @@ +package v3 + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST +//go:generate -command DeleteRequest requestgen -method DELETE + +import ( + maxapi "github.com/c9s/bbgo/pkg/exchange/max/maxapi" +) + +// create type alias +type WalletType = maxapi.WalletType +type Order = maxapi.Order +type Trade = maxapi.Trade +type Account = maxapi.Account + +// OrderService manages the Order endpoint. +type OrderService struct { + Client *maxapi.RestClient +} diff --git a/pkg/exchange/max/maxapi/websocket.go b/pkg/exchange/max/maxapi/websocket.go index dcdf7143c9..241adda6a9 100644 --- a/pkg/exchange/max/maxapi/websocket.go +++ b/pkg/exchange/max/maxapi/websocket.go @@ -1,12 +1,6 @@ package max import ( - "context" - "fmt" - "time" - - "github.com/google/uuid" - "github.com/gorilla/websocket" "github.com/pkg/errors" ) @@ -14,6 +8,11 @@ var WebSocketURL = "wss://max-stream.maicoin.com/ws" var ErrMessageTypeNotSupported = errors.New("message type currently not supported") +type SubscribeOptions struct { + Depth int `json:"depth,omitempty"` + Resolution string `json:"resolution,omitempty"` +} + // Subscription is used for presenting the subscription metadata. 
// This is used for sending subscribe and unsubscribe requests type Subscription struct { @@ -29,231 +28,3 @@ type WebsocketCommand struct { Action string `json:"action"` Subscriptions []Subscription `json:"subscriptions,omitempty"` } - -var SubscribeAction = "subscribe" -var UnsubscribeAction = "unsubscribe" - -//go:generate callbackgen -type WebSocketService -type WebSocketService struct { - baseURL, key, secret string - - conn *websocket.Conn - - reconnectC chan struct{} - - // Subscriptions is the subscription request payloads that will be used for sending subscription request - Subscriptions []Subscription - - connectCallbacks []func(conn *websocket.Conn) - disconnectCallbacks []func(conn *websocket.Conn) - - errorCallbacks []func(err error) - messageCallbacks []func(message []byte) - bookEventCallbacks []func(e BookEvent) - tradeEventCallbacks []func(e PublicTradeEvent) - kLineEventCallbacks []func(e KLineEvent) - errorEventCallbacks []func(e ErrorEvent) - subscriptionEventCallbacks []func(e SubscriptionEvent) - - tradeUpdateEventCallbacks []func(e TradeUpdateEvent) - tradeSnapshotEventCallbacks []func(e TradeSnapshotEvent) - orderUpdateEventCallbacks []func(e OrderUpdateEvent) - orderSnapshotEventCallbacks []func(e OrderSnapshotEvent) - - accountSnapshotEventCallbacks []func(e AccountSnapshotEvent) - accountUpdateEventCallbacks []func(e AccountUpdateEvent) -} - -func NewWebSocketService(wsURL string, key, secret string) *WebSocketService { - return &WebSocketService{ - key: key, - secret: secret, - reconnectC: make(chan struct{}, 1), - baseURL: wsURL, - } -} - -func (s *WebSocketService) Connect(ctx context.Context) error { - s.OnConnect(func(c *websocket.Conn) { - if err := s.SendSubscriptionRequest(SubscribeAction); err != nil { - s.EmitError(err) - logger.WithError(err).Error("failed to subscribe") - } - }) - - // pre-allocate the websocket client, the websocket client can be used for reconnecting. 
- if err := s.connect(ctx); err != nil { - return err - } - go s.read(ctx) - return nil -} - -func (s *WebSocketService) Auth() error { - nonce := time.Now().UnixNano() / int64(time.Millisecond) - auth := &AuthMessage{ - Action: "auth", - APIKey: s.key, - Nonce: nonce, - Signature: signPayload(fmt.Sprintf("%d", nonce), s.secret), - ID: uuid.New().String(), - } - return s.conn.WriteJSON(auth) -} - -func (s *WebSocketService) connect(ctx context.Context) error { - dialer := websocket.DefaultDialer - conn, _, err := dialer.DialContext(ctx, s.baseURL, nil) - if err != nil { - return err - } - - s.conn = conn - s.EmitConnect(conn) - - return nil -} - -func (s *WebSocketService) emitReconnect() { - select { - case s.reconnectC <- struct{}{}: - default: - } -} - -func (s *WebSocketService) read(ctx context.Context) { - for { - select { - case <-ctx.Done(): - return - - case <-s.reconnectC: - time.Sleep(3 * time.Second) - if err := s.connect(ctx); err != nil { - s.emitReconnect() - } - - default: - mt, msg, err := s.conn.ReadMessage() - - if err != nil { - s.emitReconnect() - continue - } - - if mt != websocket.TextMessage { - continue - } - - s.EmitMessage(msg) - - m, err := ParseMessage(msg) - if err != nil { - s.EmitError(errors.Wrapf(err, "failed to parse message: %s", msg)) - continue - } - - if m != nil { - s.dispatch(m) - } - } - } -} - -func (s *WebSocketService) dispatch(msg interface{}) { - switch e := msg.(type) { - - case *BookEvent: - s.EmitBookEvent(*e) - - case *PublicTradeEvent: - s.EmitTradeEvent(*e) - - case *KLineEvent: - s.EmitKLineEvent(*e) - - case *ErrorEvent: - s.EmitErrorEvent(*e) - - case *SubscriptionEvent: - s.EmitSubscriptionEvent(*e) - - case *TradeSnapshotEvent: - s.EmitTradeSnapshotEvent(*e) - - case *TradeUpdateEvent: - s.EmitTradeUpdateEvent(*e) - - case *AccountSnapshotEvent: - s.EmitAccountSnapshotEvent(*e) - - case *AccountUpdateEvent: - s.EmitAccountUpdateEvent(*e) - - case *OrderSnapshotEvent: - s.EmitOrderSnapshotEvent(*e) - - case *OrderUpdateEvent: - s.EmitOrderUpdateEvent(*e) - - default: - s.EmitError(fmt.Errorf("unsupported %T event: %+v", e, e)) - } -} - -func (s *WebSocketService) ClearSubscriptions() { - s.Subscriptions = nil -} - -func (s *WebSocketService) Reconnect() { - logger.Info("reconnecting...") - s.emitReconnect() -} - -// Subscribe is a helper method for building subscription request from the internal mapping types. -// (Internal public method) -func (s *WebSocketService) Subscribe(channel, market string) { - s.AddSubscription(Subscription{ - Channel: channel, - Market: market, - }) -} - -// AddSubscription adds the subscription request to the buffer, these requests will be sent to the server right after connecting to the endpoint. -func (s *WebSocketService) AddSubscription(subscription Subscription) { - s.Subscriptions = append(s.Subscriptions, subscription) -} - -func (s *WebSocketService) Resubscribe() { - // Calling Resubscribe() by websocket is not enough to refresh orderbook. - // We still need to get orderbook snapshot by rest client. - // Therefore Reconnect() is used to simplify implementation. 
- logger.Info("resubscribing all subscription...") - if err := s.SendSubscriptionRequest(UnsubscribeAction); err != nil { - logger.WithError(err).Error("failed to unsubscribe") - } - - if err := s.SendSubscriptionRequest(SubscribeAction); err != nil { - logger.WithError(err).Error("failed to unsubscribe") - } -} - -func (s *WebSocketService) SendSubscriptionRequest(action string) error { - request := WebsocketCommand{ - Action: action, - Subscriptions: s.Subscriptions, - } - - logger.Debugf("sending websocket subscription: %+v", request) - - if err := s.conn.WriteJSON(request); err != nil { - return errors.Wrap(err, "Failed to send subscribe event") - } - - return nil -} - -// Close web socket connection -func (s *WebSocketService) Close() error { - return s.conn.Close() -} diff --git a/pkg/exchange/max/maxapi/websocketservice_callbacks.go b/pkg/exchange/max/maxapi/websocketservice_callbacks.go deleted file mode 100644 index e32d2b5dbc..0000000000 --- a/pkg/exchange/max/maxapi/websocketservice_callbacks.go +++ /dev/null @@ -1,157 +0,0 @@ -// Code generated by "callbackgen -type WebSocketService"; DO NOT EDIT. - -package max - -import ( - "github.com/gorilla/websocket" -) - -func (s *WebSocketService) OnConnect(cb func(conn *websocket.Conn)) { - s.connectCallbacks = append(s.connectCallbacks, cb) -} - -func (s *WebSocketService) EmitConnect(conn *websocket.Conn) { - for _, cb := range s.connectCallbacks { - cb(conn) - } -} - -func (s *WebSocketService) OnDisconnect(cb func(conn *websocket.Conn)) { - s.disconnectCallbacks = append(s.disconnectCallbacks, cb) -} - -func (s *WebSocketService) EmitDisconnect(conn *websocket.Conn) { - for _, cb := range s.disconnectCallbacks { - cb(conn) - } -} - -func (s *WebSocketService) OnError(cb func(err error)) { - s.errorCallbacks = append(s.errorCallbacks, cb) -} - -func (s *WebSocketService) EmitError(err error) { - for _, cb := range s.errorCallbacks { - cb(err) - } -} - -func (s *WebSocketService) OnMessage(cb func(message []byte)) { - s.messageCallbacks = append(s.messageCallbacks, cb) -} - -func (s *WebSocketService) EmitMessage(message []byte) { - for _, cb := range s.messageCallbacks { - cb(message) - } -} - -func (s *WebSocketService) OnBookEvent(cb func(e BookEvent)) { - s.bookEventCallbacks = append(s.bookEventCallbacks, cb) -} - -func (s *WebSocketService) EmitBookEvent(e BookEvent) { - for _, cb := range s.bookEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnTradeEvent(cb func(e PublicTradeEvent)) { - s.tradeEventCallbacks = append(s.tradeEventCallbacks, cb) -} - -func (s *WebSocketService) EmitTradeEvent(e PublicTradeEvent) { - for _, cb := range s.tradeEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnKLineEvent(cb func(e KLineEvent)) { - s.kLineEventCallbacks = append(s.kLineEventCallbacks, cb) -} - -func (s *WebSocketService) EmitKLineEvent(e KLineEvent) { - for _, cb := range s.kLineEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnErrorEvent(cb func(e ErrorEvent)) { - s.errorEventCallbacks = append(s.errorEventCallbacks, cb) -} - -func (s *WebSocketService) EmitErrorEvent(e ErrorEvent) { - for _, cb := range s.errorEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnSubscriptionEvent(cb func(e SubscriptionEvent)) { - s.subscriptionEventCallbacks = append(s.subscriptionEventCallbacks, cb) -} - -func (s *WebSocketService) EmitSubscriptionEvent(e SubscriptionEvent) { - for _, cb := range s.subscriptionEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnTradeUpdateEvent(cb 
func(e TradeUpdateEvent)) { - s.tradeUpdateEventCallbacks = append(s.tradeUpdateEventCallbacks, cb) -} - -func (s *WebSocketService) EmitTradeUpdateEvent(e TradeUpdateEvent) { - for _, cb := range s.tradeUpdateEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnTradeSnapshotEvent(cb func(e TradeSnapshotEvent)) { - s.tradeSnapshotEventCallbacks = append(s.tradeSnapshotEventCallbacks, cb) -} - -func (s *WebSocketService) EmitTradeSnapshotEvent(e TradeSnapshotEvent) { - for _, cb := range s.tradeSnapshotEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnOrderUpdateEvent(cb func(e OrderUpdateEvent)) { - s.orderUpdateEventCallbacks = append(s.orderUpdateEventCallbacks, cb) -} - -func (s *WebSocketService) EmitOrderUpdateEvent(e OrderUpdateEvent) { - for _, cb := range s.orderUpdateEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnOrderSnapshotEvent(cb func(e OrderSnapshotEvent)) { - s.orderSnapshotEventCallbacks = append(s.orderSnapshotEventCallbacks, cb) -} - -func (s *WebSocketService) EmitOrderSnapshotEvent(e OrderSnapshotEvent) { - for _, cb := range s.orderSnapshotEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnAccountSnapshotEvent(cb func(e AccountSnapshotEvent)) { - s.accountSnapshotEventCallbacks = append(s.accountSnapshotEventCallbacks, cb) -} - -func (s *WebSocketService) EmitAccountSnapshotEvent(e AccountSnapshotEvent) { - for _, cb := range s.accountSnapshotEventCallbacks { - cb(e) - } -} - -func (s *WebSocketService) OnAccountUpdateEvent(cb func(e AccountUpdateEvent)) { - s.accountUpdateEventCallbacks = append(s.accountUpdateEventCallbacks, cb) -} - -func (s *WebSocketService) EmitAccountUpdateEvent(e AccountUpdateEvent) { - for _, cb := range s.accountUpdateEventCallbacks { - cb(e) - } -} diff --git a/pkg/exchange/max/maxapi/withdrawal.go b/pkg/exchange/max/maxapi/withdrawal.go new file mode 100644 index 0000000000..7e3581539a --- /dev/null +++ b/pkg/exchange/max/maxapi/withdrawal.go @@ -0,0 +1,81 @@ +package max + +//go:generate -command GetRequest requestgen -method GET +//go:generate -command PostRequest requestgen -method POST + +import ( + "github.com/c9s/requestgen" +) + +/* + example response + + { + "uuid": "18022603540001", + "currency": "eth", + "currency_version": "eth", + "amount": "0.019", + "fee": "0.0", + "fee_currency": "eth", + "created_at": 1521726960, + "updated_at": 1521726960, + "state": "confirmed", + "type": "external", + "transaction_type": "external send", + "notes": "notes", + "sender": { + "email": "max****@maicoin.com" + }, + "recipient": { + "address": "0x5c7d23d516f120d322fc7b116386b7e491739138" + } + } +*/ + +//go:generate PostRequest -url "v2/withdrawal" -type WithdrawalRequest -responseType .Withdraw +type WithdrawalRequest struct { + client requestgen.AuthenticatedAPIClient + + addressUUID string `param:"address_uuid,required"` + currency string `param:"currency,required"` + amount float64 `param:"amount"` +} + +type WithdrawalAddress struct { + UUID string `json:"uuid"` + Currency string `json:"currency"` + CurrencyVersion string `json:"currency_version"` + Address string `json:"address"` + ExtraLabel string `json:"extra_label"` + State string `json:"state"` + SygnaVaspCode string `json:"sygna_vasp_code"` + SygnaUserType string `json:"sygna_user_type"` + SygnaUserCode string `json:"sygna_user_code"` + IsInternal bool `json:"is_internal"` +} + +//go:generate GetRequest -url "v2/withdraw_addresses" -type GetWithdrawalAddressesRequest -responseType []WithdrawalAddress +type 
GetWithdrawalAddressesRequest struct { + client requestgen.AuthenticatedAPIClient + currency string `param:"currency,required"` +} + +type WithdrawalService struct { + client *RestClient +} + +func (s *WithdrawalService) NewGetWithdrawalAddressesRequest() *GetWithdrawalAddressesRequest { + return &GetWithdrawalAddressesRequest{ + client: s.client, + } +} + +func (s *WithdrawalService) NewWithdrawalRequest() *WithdrawalRequest { + return &WithdrawalRequest{client: s.client} +} + +func (s *WithdrawalService) NewGetWithdrawalHistoryRequest() *GetWithdrawHistoryRequest { + return &GetWithdrawHistoryRequest{ + client: s.client, + } +} diff --git a/pkg/exchange/max/maxapi/withdrawal_request_requestgen.go b/pkg/exchange/max/maxapi/withdrawal_request_requestgen.go new file mode 100644 index 0000000000..e2cfeb3f61 --- /dev/null +++ b/pkg/exchange/max/maxapi/withdrawal_request_requestgen.go @@ -0,0 +1,179 @@ +// Code generated by "requestgen -method POST -url v2/withdrawal -type WithdrawalRequest -responseType .Withdraw"; DO NOT EDIT. + +package max + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "reflect" + "regexp" +) + +func (w *WithdrawalRequest) AddressUUID(addressUUID string) *WithdrawalRequest { + w.addressUUID = addressUUID + return w +} + +func (w *WithdrawalRequest) Currency(currency string) *WithdrawalRequest { + w.currency = currency + return w +} + +func (w *WithdrawalRequest) Amount(amount float64) *WithdrawalRequest { + w.amount = amount + return w +} + +// GetQueryParameters builds and checks the query parameters and returns url.Values +func (w *WithdrawalRequest) GetQueryParameters() (url.Values, error) { + var params = map[string]interface{}{} + + query := url.Values{} + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +// GetParameters builds and checks the parameters and return the result in a map object +func (w *WithdrawalRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + // check addressUUID field -> json key address_uuid + addressUUID := w.addressUUID + + // TEMPLATE check-required + if len(addressUUID) == 0 { + return nil, fmt.Errorf("address_uuid is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of addressUUID + params["address_uuid"] = addressUUID + // check currency field -> json key currency + currency := w.currency + + // TEMPLATE check-required + if len(currency) == 0 { + return nil, fmt.Errorf("currency is required, empty string given") + } + // END TEMPLATE check-required + + // assign parameter of currency + params["currency"] = currency + // check amount field -> json key amount + amount := w.amount + + // assign parameter of amount + params["amount"] = amount + + return params, nil +} + +// GetParametersQuery converts the parameters from GetParameters into the url.Values format +func (w *WithdrawalRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := w.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + if w.isVarSlice(v) { + w.iterateSlice(v, func(it interface{}) { + query.Add(k+"[]", fmt.Sprintf("%v", it)) + }) + } else { + query.Add(k, fmt.Sprintf("%v", v)) + } + } + + return query, nil +} + +// GetParametersJSON converts the parameters from GetParameters into the JSON format +func (w *WithdrawalRequest) GetParametersJSON() ([]byte, error) { + params, err := w.GetParameters() + if err != nil { + return nil, err + } + + return 
json.Marshal(params) +} + +// GetSlugParameters builds and checks the slug parameters and return the result in a map object +func (w *WithdrawalRequest) GetSlugParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + return params, nil +} + +func (w *WithdrawalRequest) applySlugsToUrl(url string, slugs map[string]string) string { + for k, v := range slugs { + needleRE := regexp.MustCompile(":" + k + "\\b") + url = needleRE.ReplaceAllString(url, v) + } + + return url +} + +func (w *WithdrawalRequest) iterateSlice(slice interface{}, f func(it interface{})) { + sliceValue := reflect.ValueOf(slice) + for i := 0; i < sliceValue.Len(); i++ { + it := sliceValue.Index(i).Interface() + f(it) + } +} + +func (w *WithdrawalRequest) isVarSlice(v interface{}) bool { + rt := reflect.TypeOf(v) + switch rt.Kind() { + case reflect.Slice: + return true + } + return false +} + +func (w *WithdrawalRequest) GetSlugsMap() (map[string]string, error) { + slugs := map[string]string{} + params, err := w.GetSlugParameters() + if err != nil { + return slugs, nil + } + + for k, v := range params { + slugs[k] = fmt.Sprintf("%v", v) + } + + return slugs, nil +} + +func (w *WithdrawalRequest) Do(ctx context.Context) (*Withdraw, error) { + + params, err := w.GetParameters() + if err != nil { + return nil, err + } + query := url.Values{} + + apiURL := "v2/withdrawal" + + req, err := w.client.NewAuthenticatedRequest(ctx, "POST", apiURL, query, params) + if err != nil { + return nil, err + } + + response, err := w.client.SendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse Withdraw + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + return &apiResponse, nil +} diff --git a/pkg/exchange/max/stream.go b/pkg/exchange/max/stream.go index 79c6442127..8a80bc7ad7 100644 --- a/pkg/exchange/max/stream.go +++ b/pkg/exchange/max/stream.go @@ -2,232 +2,281 @@ package max import ( "context" - "strconv" + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "fmt" + "os" "time" - "github.com/gorilla/websocket" + "github.com/google/uuid" max "github.com/c9s/bbgo/pkg/exchange/max/maxapi" - "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" - "github.com/c9s/bbgo/pkg/util" ) -var logger = log.WithField("exchange", "max") - +//go:generate callbackgen -type Stream type Stream struct { types.StandardStream - - websocketService *max.WebSocketService - - publicOnly bool + types.MarginSettings + + key, secret string + + authEventCallbacks []func(e max.AuthEvent) + bookEventCallbacks []func(e max.BookEvent) + tradeEventCallbacks []func(e max.PublicTradeEvent) + kLineEventCallbacks []func(e max.KLineEvent) + errorEventCallbacks []func(e max.ErrorEvent) + subscriptionEventCallbacks []func(e max.SubscriptionEvent) + + tradeUpdateEventCallbacks []func(e max.TradeUpdateEvent) + tradeSnapshotEventCallbacks []func(e max.TradeSnapshotEvent) + orderUpdateEventCallbacks []func(e max.OrderUpdateEvent) + orderSnapshotEventCallbacks []func(e max.OrderSnapshotEvent) + adRatioEventCallbacks []func(e max.ADRatioEvent) + debtEventCallbacks []func(e max.DebtEvent) + + accountSnapshotEventCallbacks []func(e max.AccountSnapshotEvent) + accountUpdateEventCallbacks []func(e max.AccountUpdateEvent) } func NewStream(key, secret string) *Stream { - wss := max.NewWebSocketService(max.WebSocketURL, key, secret) - stream := &Stream{ - websocketService: wss, + StandardStream: types.NewStandardStream(), + key: key, + // pragma: allowlist nextline secret + secret: secret, } + 
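+	// Wire the generic types.StandardStream machinery: getEndpoint resolves the
+	// websocket URL (honoring MAX_API_WS_URL), max.ParseMessage decodes raw frames
+	// into typed events, and dispatchEvent fans those events out to the typed
+	// callbacks registered below, which convert them into global bbgo types.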
stream.SetEndpointCreator(stream.getEndpoint) + stream.SetParser(max.ParseMessage) + stream.SetDispatcher(stream.dispatchEvent) + stream.OnConnect(stream.handleConnect) + stream.OnKLineEvent(stream.handleKLineEvent) + stream.OnOrderSnapshotEvent(stream.handleOrderSnapshotEvent) + stream.OnOrderUpdateEvent(stream.handleOrderUpdateEvent) + stream.OnTradeUpdateEvent(stream.handleTradeEvent) + stream.OnBookEvent(stream.handleBookEvent) + stream.OnAccountSnapshotEvent(stream.handleAccountSnapshotEvent) + stream.OnAccountUpdateEvent(stream.handleAccountUpdateEvent) + return stream +} - wss.OnConnect(func(conn *websocket.Conn) { - if key == "" || secret == "" { - log.Warn("MAX API key or secret is empty, will not send authentication command") - } else { - if err := wss.Auth(); err != nil { - wss.EmitError(err) - logger.WithError(err).Error("failed to send auth request") - } +func (s *Stream) getEndpoint(ctx context.Context) (string, error) { + url := os.Getenv("MAX_API_WS_URL") + if url == "" { + url = max.WebSocketURL + } + return url, nil +} + +func (s *Stream) handleConnect() { + if s.PublicOnly { + cmd := &max.WebsocketCommand{ + Action: "subscribe", } - }) + for _, sub := range s.Subscriptions { + var depth int - wss.OnMessage(func(message []byte) { - logger.Debugf("M: %s", message) - }) + if len(sub.Options.Depth) > 0 { + switch sub.Options.Depth { + case types.DepthLevelFull: + depth = 0 - wss.OnKLineEvent(func(e max.KLineEvent) { - kline := e.KLine.KLine() - stream.EmitKLine(kline) - if kline.Closed { - stream.EmitKLineClosed(kline) - } - }) - - wss.OnOrderSnapshotEvent(func(e max.OrderSnapshotEvent) { - for _, o := range e.Orders { - globalOrder, err := toGlobalOrderUpdate(o) - if err != nil { - log.WithError(err).Error("websocket order snapshot convert error") - continue + case types.DepthLevelMedium: + depth = 20 + + case types.DepthLevel5: + depth = 5 + + } } - stream.EmitOrderUpdate(*globalOrder) + cmd.Subscriptions = append(cmd.Subscriptions, max.Subscription{ + Channel: string(sub.Channel), + Market: toLocalSymbol(sub.Symbol), + Depth: depth, + Resolution: sub.Options.Interval.String(), + }) } - }) - - wss.OnOrderUpdateEvent(func(e max.OrderUpdateEvent) { - for _, o := range e.Orders { - globalOrder, err := toGlobalOrderUpdate(o) - if err != nil { - log.WithError(err).Error("websocket order update convert error") - continue - } - stream.EmitOrderUpdate(*globalOrder) + if err := s.Conn.WriteJSON(cmd); err != nil { + log.WithError(err).Error("failed to send subscription request") } - }) - - wss.OnTradeUpdateEvent(func(e max.TradeUpdateEvent) { - for _, tradeUpdate := range e.Trades { - trade, err := convertWebSocketTrade(tradeUpdate) - if err != nil { - log.WithError(err).Error("websocket trade update convert error") - return + + } else { + var filters []string + if s.MarginSettings.IsMargin { + filters = []string{ + "mwallet_order", + "mwallet_trade", + "mwallet_account", + "ad_ratio", + "borrowing", } + } - stream.EmitTradeUpdate(*trade) + nonce := time.Now().UnixNano() / int64(time.Millisecond) + auth := &max.AuthMessage{ + // pragma: allowlist nextline secret + Action: "auth", + // pragma: allowlist nextline secret + APIKey: s.key, + Nonce: nonce, + Signature: signPayload(fmt.Sprintf("%d", nonce), s.secret), + ID: uuid.New().String(), + Filters: filters, } - }) - wss.OnBookEvent(func(e max.BookEvent) { - newBook, err := e.OrderBook() - if err != nil { - logger.WithError(err).Error("book convert error") - return + if err := s.Conn.WriteJSON(auth); err != nil { + 
log.WithError(err).Error("failed to send auth request") } + } +} - newBook.Symbol = toGlobalSymbol(e.Market) +func (s *Stream) handleKLineEvent(e max.KLineEvent) { + kline := e.KLine.KLine() + s.EmitKLine(kline) + if kline.Closed { + s.EmitKLineClosed(kline) + } +} - switch e.Event { - case "snapshot": - stream.EmitBookSnapshot(newBook) - case "update": - stream.EmitBookUpdate(newBook) +func (s *Stream) handleOrderSnapshotEvent(e max.OrderSnapshotEvent) { + for _, o := range e.Orders { + globalOrder, err := convertWebSocketOrderUpdate(o) + if err != nil { + log.WithError(err).Error("websocket order snapshot convert error") + continue } - }) - - wss.OnConnect(func(conn *websocket.Conn) { - stream.EmitConnect() - }) - - wss.OnAccountSnapshotEvent(func(e max.AccountSnapshotEvent) { - snapshot := map[string]types.Balance{} - for _, bm := range e.Balances { - balance, err := bm.Balance() - if err != nil { - continue - } - snapshot[toGlobalCurrency(balance.Currency)] = *balance - } + s.EmitOrderUpdate(*globalOrder) + } +} - stream.EmitBalanceSnapshot(snapshot) - }) +func (s *Stream) handleOrderUpdateEvent(e max.OrderUpdateEvent) { + for _, o := range e.Orders { + globalOrder, err := convertWebSocketOrderUpdate(o) + if err != nil { + log.WithError(err).Error("websocket order update convert error") + continue + } - wss.OnAccountUpdateEvent(func(e max.AccountUpdateEvent) { - snapshot := map[string]types.Balance{} - for _, bm := range e.Balances { - balance, err := bm.Balance() - if err != nil { - continue - } + s.EmitOrderUpdate(*globalOrder) + } +} - snapshot[toGlobalCurrency(balance.Currency)] = *balance +func (s *Stream) handleTradeEvent(e max.TradeUpdateEvent) { + for _, tradeUpdate := range e.Trades { + trade, err := convertWebSocketTrade(tradeUpdate) + if err != nil { + log.WithError(err).Error("websocket trade update convert error") + return } - stream.EmitBalanceUpdate(snapshot) - }) + s.EmitTradeUpdate(*trade) + } +} - wss.OnError(func(err error) { - log.WithError(err).Error("websocket error") - }) +func (s *Stream) handleBookEvent(e max.BookEvent) { + newBook, err := e.OrderBook() + if err != nil { + log.WithError(err).Error("book convert error") + return + } - return stream -} + newBook.Symbol = toGlobalSymbol(e.Market) -func (s *Stream) SetPublicOnly() { - s.publicOnly = true + switch e.Event { + case "snapshot": + s.EmitBookSnapshot(newBook) + case "update": + s.EmitBookUpdate(newBook) + } } -func (s *Stream) Subscribe(channel types.Channel, symbol string, options types.SubscribeOptions) { - s.websocketService.Subscribe(string(channel), toLocalSymbol(symbol)) -} +func (s *Stream) handleAccountSnapshotEvent(e max.AccountSnapshotEvent) { + snapshot := map[string]types.Balance{} + for _, bm := range e.Balances { + balance, err := bm.Balance() + if err != nil { + continue + } -func (s *Stream) Connect(ctx context.Context) error { - return s.websocketService.Connect(ctx) + snapshot[balance.Currency] = *balance + } + + s.EmitBalanceSnapshot(snapshot) } -func (s *Stream) Close() error { - return s.websocketService.Close() +func (s *Stream) handleAccountUpdateEvent(e max.AccountUpdateEvent) { + snapshot := map[string]types.Balance{} + for _, bm := range e.Balances { + balance, err := bm.Balance() + if err != nil { + continue + } + + snapshot[toGlobalCurrency(balance.Currency)] = *balance + } + + s.EmitBalanceUpdate(snapshot) } -func convertWebSocketTrade(t max.TradeUpdate) (*types.Trade, error) { - // skip trade ID that is the same. 
however this should not happen - var side = toGlobalSideType(t.Side) +func (s *Stream) dispatchEvent(e interface{}) { + switch e := e.(type) { - // trade time - mts := time.Unix(0, t.Timestamp*int64(time.Millisecond)) + case *max.AuthEvent: + s.EmitAuthEvent(*e) - price, err := strconv.ParseFloat(t.Price, 64) - if err != nil { - return nil, err - } + case *max.BookEvent: + s.EmitBookEvent(*e) - quantity, err := strconv.ParseFloat(t.Volume, 64) - if err != nil { - return nil, err - } + case *max.PublicTradeEvent: + s.EmitTradeEvent(*e) - quoteQuantity := price * quantity + case *max.KLineEvent: + s.EmitKLineEvent(*e) - fee, err := strconv.ParseFloat(t.Fee, 64) - if err != nil { - return nil, err - } + case *max.ErrorEvent: + s.EmitErrorEvent(*e) - return &types.Trade{ - ID: int64(t.ID), - OrderID: t.OrderID, - Symbol: toGlobalSymbol(t.Market), - Exchange: "max", - Price: price, - Quantity: quantity, - Side: side, - IsBuyer: side == "bid", - IsMaker: t.Maker, - Fee: fee, - FeeCurrency: toGlobalCurrency(t.FeeCurrency), - QuoteQuantity: quoteQuantity, - Time: mts, - }, nil -} + case *max.SubscriptionEvent: + s.EmitSubscriptionEvent(*e) -func toGlobalOrderUpdate(u max.OrderUpdate) (*types.Order, error) { - executedVolume, err := fixedpoint.NewFromString(u.ExecutedVolume) - if err != nil { - return nil, err + case *max.TradeSnapshotEvent: + s.EmitTradeSnapshotEvent(*e) + + case *max.TradeUpdateEvent: + s.EmitTradeUpdateEvent(*e) + + case *max.AccountSnapshotEvent: + s.EmitAccountSnapshotEvent(*e) + + case *max.AccountUpdateEvent: + s.EmitAccountUpdateEvent(*e) + + case *max.OrderSnapshotEvent: + s.EmitOrderSnapshotEvent(*e) + + case *max.OrderUpdateEvent: + s.EmitOrderUpdateEvent(*e) + + case *max.ADRatioEvent: + log.Infof("adRatio: %+v", e.ADRatio) + + case *max.DebtEvent: + log.Infof("debtEvent: %+v", e.Debts) + + default: + log.Warnf("unhandled %T event: %+v", e, e) } +} - remainingVolume, err := fixedpoint.NewFromString(u.RemainingVolume) +func signPayload(payload string, secret string) string { + var sig = hmac.New(sha256.New, []byte(secret)) + _, err := sig.Write([]byte(payload)) if err != nil { - return nil, err + return "" } - - return &types.Order{ - SubmitOrder: types.SubmitOrder{ - ClientOrderID: u.ClientOID, - Symbol: toGlobalSymbol(u.Market), - Side: toGlobalSideType(u.Side), - Type: toGlobalOrderType(u.OrderType), - Quantity: util.MustParseFloat(u.Volume), - Price: util.MustParseFloat(u.Price), - StopPrice: util.MustParseFloat(u.StopPrice), - TimeInForce: "GTC", // MAX only supports GTC - }, - Exchange: "max", - OrderID: u.ID, - Status: toGlobalOrderStatus(u.State, executedVolume, remainingVolume), - ExecutedQuantity: executedVolume.Float64(), - CreationTime: time.Unix(0, u.CreatedAtMs*int64(time.Millisecond)), - }, nil + return hex.EncodeToString(sig.Sum(nil)) } diff --git a/pkg/exchange/max/stream_callbacks.go b/pkg/exchange/max/stream_callbacks.go new file mode 100644 index 0000000000..3f556ef3cb --- /dev/null +++ b/pkg/exchange/max/stream_callbacks.go @@ -0,0 +1,147 @@ +// Code generated by "callbackgen -type Stream"; DO NOT EDIT. 
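For reference, a minimal consumer sketch of the rewritten Stream, which now rides on types.StandardStream instead of the removed WebSocketService. This is not part of the patch; Subscribe, SetPublicOnly, OnKLineClosed, and Close are assumed from bbgo's standard stream interface, and the symbol/interval literals are illustrative.

package main

import (
	"context"
	"log"
	"time"

	"github.com/c9s/bbgo/pkg/exchange/max"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	// empty credentials: public market data only, so no auth message is sent
	stream := max.NewStream("", "")
	stream.SetPublicOnly()

	// subscriptions are flushed from handleConnect once the websocket is up
	stream.Subscribe(types.KLineChannel, "BTCUSDT", types.SubscribeOptions{
		Interval: types.Interval1m,
	})

	stream.OnKLineClosed(func(k types.KLine) {
		log.Printf("closed kline: %+v", k)
	})

	ctx := context.Background()
	if err := stream.Connect(ctx); err != nil {
		log.Fatal(err)
	}

	// let it run for a while, then shut down
	time.Sleep(time.Minute)
	_ = stream.Close()
}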
+ +package max + +import ( + "github.com/c9s/bbgo/pkg/exchange/max/maxapi" +) + +func (s *Stream) OnAuthEvent(cb func(e max.AuthEvent)) { + s.authEventCallbacks = append(s.authEventCallbacks, cb) +} + +func (s *Stream) EmitAuthEvent(e max.AuthEvent) { + for _, cb := range s.authEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnBookEvent(cb func(e max.BookEvent)) { + s.bookEventCallbacks = append(s.bookEventCallbacks, cb) +} + +func (s *Stream) EmitBookEvent(e max.BookEvent) { + for _, cb := range s.bookEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnTradeEvent(cb func(e max.PublicTradeEvent)) { + s.tradeEventCallbacks = append(s.tradeEventCallbacks, cb) +} + +func (s *Stream) EmitTradeEvent(e max.PublicTradeEvent) { + for _, cb := range s.tradeEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnKLineEvent(cb func(e max.KLineEvent)) { + s.kLineEventCallbacks = append(s.kLineEventCallbacks, cb) +} + +func (s *Stream) EmitKLineEvent(e max.KLineEvent) { + for _, cb := range s.kLineEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnErrorEvent(cb func(e max.ErrorEvent)) { + s.errorEventCallbacks = append(s.errorEventCallbacks, cb) +} + +func (s *Stream) EmitErrorEvent(e max.ErrorEvent) { + for _, cb := range s.errorEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnSubscriptionEvent(cb func(e max.SubscriptionEvent)) { + s.subscriptionEventCallbacks = append(s.subscriptionEventCallbacks, cb) +} + +func (s *Stream) EmitSubscriptionEvent(e max.SubscriptionEvent) { + for _, cb := range s.subscriptionEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnTradeUpdateEvent(cb func(e max.TradeUpdateEvent)) { + s.tradeUpdateEventCallbacks = append(s.tradeUpdateEventCallbacks, cb) +} + +func (s *Stream) EmitTradeUpdateEvent(e max.TradeUpdateEvent) { + for _, cb := range s.tradeUpdateEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnTradeSnapshotEvent(cb func(e max.TradeSnapshotEvent)) { + s.tradeSnapshotEventCallbacks = append(s.tradeSnapshotEventCallbacks, cb) +} + +func (s *Stream) EmitTradeSnapshotEvent(e max.TradeSnapshotEvent) { + for _, cb := range s.tradeSnapshotEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnOrderUpdateEvent(cb func(e max.OrderUpdateEvent)) { + s.orderUpdateEventCallbacks = append(s.orderUpdateEventCallbacks, cb) +} + +func (s *Stream) EmitOrderUpdateEvent(e max.OrderUpdateEvent) { + for _, cb := range s.orderUpdateEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnOrderSnapshotEvent(cb func(e max.OrderSnapshotEvent)) { + s.orderSnapshotEventCallbacks = append(s.orderSnapshotEventCallbacks, cb) +} + +func (s *Stream) EmitOrderSnapshotEvent(e max.OrderSnapshotEvent) { + for _, cb := range s.orderSnapshotEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnAdRatioEvent(cb func(e max.ADRatioEvent)) { + s.adRatioEventCallbacks = append(s.adRatioEventCallbacks, cb) +} + +func (s *Stream) EmitAdRatioEvent(e max.ADRatioEvent) { + for _, cb := range s.adRatioEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnDebtEvent(cb func(e max.DebtEvent)) { + s.debtEventCallbacks = append(s.debtEventCallbacks, cb) +} + +func (s *Stream) EmitDebtEvent(e max.DebtEvent) { + for _, cb := range s.debtEventCallbacks { + cb(e) + } +} + +func (s *Stream) OnAccountSnapshotEvent(cb func(e max.AccountSnapshotEvent)) { + s.accountSnapshotEventCallbacks = append(s.accountSnapshotEventCallbacks, cb) +} + +func (s *Stream) EmitAccountSnapshotEvent(e max.AccountSnapshotEvent) { + for _, cb := range s.accountSnapshotEventCallbacks { + cb(e) + } +} + +func (s *Stream) 
OnAccountUpdateEvent(cb func(e max.AccountUpdateEvent)) { + s.accountUpdateEventCallbacks = append(s.accountUpdateEventCallbacks, cb) +} + +func (s *Stream) EmitAccountUpdateEvent(e max.AccountUpdateEvent) { + for _, cb := range s.accountUpdateEventCallbacks { + cb(e) + } +} diff --git a/pkg/exchange/max/ticker_test.go b/pkg/exchange/max/ticker_test.go new file mode 100644 index 0000000000..6ef459e1a7 --- /dev/null +++ b/pkg/exchange/max/ticker_test.go @@ -0,0 +1,54 @@ +package max + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExchange_QueryTickers_AllSymbols(t *testing.T) { + key := os.Getenv("MAX_API_KEY") + secret := os.Getenv("MAX_API_SECRET") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + return + } + + e := New(key, secret) + got, err := e.QueryTickers(context.Background()) + if assert.NoError(t, err) { + assert.True(t, len(got) > 1, "max: attempting to get all symbol tickers, but get 1 or less") + } +} + +func TestExchange_QueryTickers_SomeSymbols(t *testing.T) { + key := os.Getenv("MAX_API_KEY") + secret := os.Getenv("MAX_API_SECRET") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + return + } + + e := New(key, secret) + got, err := e.QueryTickers(context.Background(), "BTCUSDT", "ETHUSDT") + if assert.NoError(t, err) { + assert.Len(t, got, 2, "max: attempting to get two symbols, but number of tickers do not match") + } +} + +func TestExchange_QueryTickers_SingleSymbol(t *testing.T) { + key := os.Getenv("MAX_API_KEY") + secret := os.Getenv("MAX_API_SECRET") + if len(key) == 0 && len(secret) == 0 { + t.Skip("api key/secret are not configured") + return + } + + e := New(key, secret) + got, err := e.QueryTickers(context.Background(), "BTCUSDT") + if assert.NoError(t, err) { + assert.Len(t, got, 1, "max: attempting to get 1 symbols, but number of tickers do not match") + } +} diff --git a/pkg/exchange/okex/convert.go b/pkg/exchange/okex/convert.go new file mode 100644 index 0000000000..968544729b --- /dev/null +++ b/pkg/exchange/okex/convert.go @@ -0,0 +1,279 @@ +package okex + +import ( + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/exchange/okex/okexapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func toGlobalSymbol(symbol string) string { + return strings.ReplaceAll(symbol, "-", "") +} + +// //go:generate sh -c "echo \"package okex\nvar spotSymbolMap = map[string]string{\n\" $(curl -s -L 'https://okex.com/api/v5/public/instruments?instType=SPOT' | jq -r '.data[] | \"\\(.instId | sub(\"-\" ; \"\") | tojson ): \\( .instId | tojson),\n\"') \"\n}\" > symbols.go" +//go:generate go run gensymbols.go +func toLocalSymbol(symbol string) string { + if s, ok := spotSymbolMap[symbol]; ok { + return s + } + + log.Errorf("failed to look up local symbol from %s", symbol) + return symbol +} + +func toGlobalTicker(marketTicker okexapi.MarketTicker) *types.Ticker { + return &types.Ticker{ + Time: marketTicker.Timestamp.Time(), + Volume: marketTicker.Volume24H, + Last: marketTicker.Last, + Open: marketTicker.Open24H, + High: marketTicker.High24H, + Low: marketTicker.Low24H, + Buy: marketTicker.BidPrice, + Sell: marketTicker.AskPrice, + } +} + +func toGlobalBalance(account *okexapi.Account) types.BalanceMap { + var balanceMap = types.BalanceMap{} + for _, balanceDetail := range account.Details { + balanceMap[balanceDetail.Currency] = types.Balance{ + Currency: 
balanceDetail.Currency, + Available: balanceDetail.CashBalance, + Locked: balanceDetail.Frozen, + } + } + return balanceMap +} + +type WebsocketSubscription struct { + Channel string `json:"channel"` + InstrumentID string `json:"instId,omitempty"` + InstrumentType string `json:"instType,omitempty"` +} + +var CandleChannels = []string{ + "candle1Y", + "candle6M", "candle3M", "candle1M", + "candle1W", + "candle1D", "candle2D", "candle3D", "candle5D", + "candle12H", "candle6H", "candle4H", "candle2H", "candle1H", + "candle30m", "candle15m", "candle5m", "candle3m", "candle1m", +} + +func convertIntervalToCandle(interval types.Interval) string { + s := interval.String() + switch s { + + case "1h", "2h", "4h", "6h", "12h", "1d", "3d": + return "candle" + strings.ToUpper(s) + + case "1m", "5m", "15m", "30m": + return "candle" + s + + } + + return "candle" + s +} + +func convertSubscription(s types.Subscription) (WebsocketSubscription, error) { + // binance uses lower case symbol name, + // for kline, it's "@kline_" + // for depth, it's "@depth OR @depth@100ms" + switch s.Channel { + case types.KLineChannel: + // Channel names are: + return WebsocketSubscription{ + Channel: convertIntervalToCandle(s.Options.Interval), + InstrumentID: toLocalSymbol(s.Symbol), + }, nil + + case types.BookChannel: + return WebsocketSubscription{ + Channel: "books", + InstrumentID: toLocalSymbol(s.Symbol), + }, nil + case types.BookTickerChannel: + return WebsocketSubscription{ + Channel: "books5", + InstrumentID: toLocalSymbol(s.Symbol), + }, nil + } + + return WebsocketSubscription{}, fmt.Errorf("unsupported public stream channel %s", s.Channel) +} + +func toLocalSideType(side types.SideType) okexapi.SideType { + return okexapi.SideType(strings.ToLower(string(side))) +} + +func segmentOrderDetails(orderDetails []okexapi.OrderDetails) (trades, orders []okexapi.OrderDetails) { + for _, orderDetail := range orderDetails { + if len(orderDetail.LastTradeID) > 0 { + trades = append(trades, orderDetail) + } + orders = append(orders, orderDetail) + } + return trades, orders +} + +func toGlobalTrades(orderDetails []okexapi.OrderDetails) ([]types.Trade, error) { + var trades []types.Trade + for _, orderDetail := range orderDetails { + tradeID, err := strconv.ParseInt(orderDetail.LastTradeID, 10, 64) + if err != nil { + return trades, errors.Wrapf(err, "error parsing tradeId value: %s", orderDetail.LastTradeID) + } + + orderID, err := strconv.ParseInt(orderDetail.OrderID, 10, 64) + if err != nil { + return trades, errors.Wrapf(err, "error parsing ordId value: %s", orderDetail.OrderID) + } + + side := types.SideType(strings.ToUpper(string(orderDetail.Side))) + + trades = append(trades, types.Trade{ + ID: uint64(tradeID), + OrderID: uint64(orderID), + Exchange: types.ExchangeOKEx, + Price: orderDetail.LastFilledPrice, + Quantity: orderDetail.LastFilledQuantity, + QuoteQuantity: orderDetail.LastFilledPrice.Mul(orderDetail.LastFilledQuantity), + Symbol: toGlobalSymbol(orderDetail.InstrumentID), + Side: side, + IsBuyer: side == types.SideTypeBuy, + IsMaker: orderDetail.ExecutionType == "M", + Time: types.Time(orderDetail.LastFilledTime), + Fee: orderDetail.LastFilledFee, + FeeCurrency: orderDetail.LastFilledFeeCurrency, + IsMargin: false, + IsIsolated: false, + }) + } + + return trades, nil +} + +func toGlobalOrders(orderDetails []okexapi.OrderDetails) ([]types.Order, error) { + var orders []types.Order + for _, orderDetail := range orderDetails { + orderID, err := strconv.ParseInt(orderDetail.OrderID, 10, 64) + if err != nil { + 
return orders, err + } + + side := types.SideType(strings.ToUpper(string(orderDetail.Side))) + + orderType, err := toGlobalOrderType(orderDetail.OrderType) + if err != nil { + return orders, err + } + + timeInForce := types.TimeInForceGTC + switch orderDetail.OrderType { + case okexapi.OrderTypeFOK: + timeInForce = types.TimeInForceFOK + case okexapi.OrderTypeIOC: + timeInForce = types.TimeInForceIOC + + } + + orderStatus, err := toGlobalOrderStatus(orderDetail.State) + if err != nil { + return orders, err + } + + isWorking := false + switch orderStatus { + case types.OrderStatusNew, types.OrderStatusPartiallyFilled: + isWorking = true + + } + + orders = append(orders, types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: orderDetail.ClientOrderID, + Symbol: toGlobalSymbol(orderDetail.InstrumentID), + Side: side, + Type: orderType, + Price: orderDetail.Price, + Quantity: orderDetail.Quantity, + StopPrice: fixedpoint.Zero, // not supported yet + TimeInForce: timeInForce, + }, + Exchange: types.ExchangeOKEx, + OrderID: uint64(orderID), + Status: orderStatus, + ExecutedQuantity: orderDetail.FilledQuantity, + IsWorking: isWorking, + CreationTime: types.Time(orderDetail.CreationTime), + UpdateTime: types.Time(orderDetail.UpdateTime), + IsMargin: false, + IsIsolated: false, + }) + } + + return orders, nil +} + +func toGlobalOrderStatus(state okexapi.OrderState) (types.OrderStatus, error) { + switch state { + case okexapi.OrderStateCanceled: + return types.OrderStatusCanceled, nil + case okexapi.OrderStateLive: + return types.OrderStatusNew, nil + case okexapi.OrderStatePartiallyFilled: + return types.OrderStatusPartiallyFilled, nil + case okexapi.OrderStateFilled: + return types.OrderStatusFilled, nil + + } + + return "", fmt.Errorf("unknown or unsupported okex order state: %s", state) +} + +func toLocalOrderType(orderType types.OrderType) (okexapi.OrderType, error) { + switch orderType { + case types.OrderTypeMarket: + return okexapi.OrderTypeMarket, nil + + case types.OrderTypeLimit: + return okexapi.OrderTypeLimit, nil + + case types.OrderTypeLimitMaker: + return okexapi.OrderTypePostOnly, nil + + } + + return "", fmt.Errorf("unknown or unsupported okex order type: %s", orderType) +} + +func toGlobalOrderType(orderType okexapi.OrderType) (types.OrderType, error) { + switch orderType { + case okexapi.OrderTypeMarket: + return types.OrderTypeMarket, nil + case okexapi.OrderTypeLimit: + return types.OrderTypeLimit, nil + case okexapi.OrderTypePostOnly: + return types.OrderTypeLimitMaker, nil + + case okexapi.OrderTypeFOK: + case okexapi.OrderTypeIOC: + + } + return "", fmt.Errorf("unknown or unsupported okex order type: %s", orderType) +} + +func toLocalInterval(src string) string { + var re = regexp.MustCompile(`\d+[hdw]`) + return re.ReplaceAllStringFunc(src, func(w string) string { + return strings.ToUpper(w) + }) +} diff --git a/pkg/exchange/okex/exchange.go b/pkg/exchange/okex/exchange.go new file mode 100644 index 0000000000..ab8dcddd21 --- /dev/null +++ b/pkg/exchange/okex/exchange.go @@ -0,0 +1,321 @@ +package okex + +import ( + "context" + "math" + "strconv" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/exchange/okex/okexapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +var marketDataLimiter = rate.NewLimiter(rate.Every(time.Second/10), 1) + +// OKB is the platform currency of OKEx, pre-allocate static string here +const OKB = "OKB" + +var log = 
logrus.WithFields(logrus.Fields{ + "exchange": "okex", +}) + +type Exchange struct { + key, secret, passphrase string + + client *okexapi.RestClient +} + +func New(key, secret, passphrase string) *Exchange { + client := okexapi.NewClient() + + if len(key) > 0 && len(secret) > 0 { + client.Auth(key, secret, passphrase) + } + + return &Exchange{ + key: key, + // pragma: allowlist nextline secret + secret: secret, + passphrase: passphrase, + client: client, + } +} + +func (e *Exchange) Name() types.ExchangeName { + return types.ExchangeOKEx +} + +func (e *Exchange) QueryMarkets(ctx context.Context) (types.MarketMap, error) { + instruments, err := e.client.PublicDataService.NewGetInstrumentsRequest(). + InstrumentType(okexapi.InstrumentTypeSpot). + Do(ctx) + + if err != nil { + return nil, err + } + + markets := types.MarketMap{} + for _, instrument := range instruments { + symbol := toGlobalSymbol(instrument.InstrumentID) + market := types.Market{ + Symbol: symbol, + LocalSymbol: instrument.InstrumentID, + + QuoteCurrency: instrument.QuoteCurrency, + BaseCurrency: instrument.BaseCurrency, + + // convert tick size OKEx to precision + PricePrecision: int(-math.Log10(instrument.TickSize.Float64())), + VolumePrecision: int(-math.Log10(instrument.LotSize.Float64())), + + // TickSize: OKEx's price tick, for BTC-USDT it's "0.1" + TickSize: instrument.TickSize, + + // Quantity step size, for BTC-USDT, it's "0.00000001" + StepSize: instrument.LotSize, + + // for BTC-USDT, it's "0.00001" + MinQuantity: instrument.MinSize, + + // OKEx does not offer minimal notional, use 1 USD here. + MinNotional: fixedpoint.One, + MinAmount: fixedpoint.One, + } + markets[symbol] = market + } + + return markets, nil +} + +func (e *Exchange) QueryTicker(ctx context.Context, symbol string) (*types.Ticker, error) { + symbol = toLocalSymbol(symbol) + + marketTicker, err := e.client.MarketTicker(symbol) + if err != nil { + return nil, err + } + + return toGlobalTicker(*marketTicker), nil +} + +func (e *Exchange) QueryTickers(ctx context.Context, symbols ...string) (map[string]types.Ticker, error) { + marketTickers, err := e.client.MarketTickers(okexapi.InstrumentTypeSpot) + if err != nil { + return nil, err + } + + tickers := make(map[string]types.Ticker) + for _, marketTicker := range marketTickers { + symbol := toGlobalSymbol(marketTicker.InstrumentID) + ticker := toGlobalTicker(marketTicker) + tickers[symbol] = *ticker + } + + if len(symbols) == 0 { + return tickers, nil + } + + selectedTickers := make(map[string]types.Ticker, len(symbols)) + for _, symbol := range symbols { + if ticker, ok := tickers[symbol]; ok { + selectedTickers[symbol] = ticker + } + } + + return selectedTickers, nil +} + +func (e *Exchange) PlatformFeeCurrency() string { + return OKB +} + +func (e *Exchange) QueryAccount(ctx context.Context) (*types.Account, error) { + accountBalance, err := e.client.AccountBalances() + if err != nil { + return nil, err + } + + var account = types.Account{ + AccountType: "SPOT", + } + + var balanceMap = toGlobalBalance(accountBalance) + account.UpdateBalances(balanceMap) + return &account, nil +} + +func (e *Exchange) QueryAccountBalances(ctx context.Context) (types.BalanceMap, error) { + accountBalances, err := e.client.AccountBalances() + if err != nil { + return nil, err + } + + var balanceMap = toGlobalBalance(accountBalances) + return balanceMap, nil +} + +func (e *Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder) (createdOrders types.OrderSlice, err error) { + var reqs 
[]*okexapi.PlaceOrderRequest + for _, order := range orders { + orderReq := e.client.TradeService.NewPlaceOrderRequest() + + orderType, err := toLocalOrderType(order.Type) + if err != nil { + return nil, err + } + + orderReq.InstrumentID(toLocalSymbol(order.Symbol)) + orderReq.Side(toLocalSideType(order.Side)) + + if order.Market.Symbol != "" { + orderReq.Quantity(order.Market.FormatQuantity(order.Quantity)) + } else { + // TODO report error + orderReq.Quantity(order.Quantity.FormatString(8)) + } + + // set price field for limit orders + switch order.Type { + case types.OrderTypeStopLimit, types.OrderTypeLimit: + if order.Market.Symbol != "" { + orderReq.Price(order.Market.FormatPrice(order.Price)) + } else { + // TODO report error + orderReq.Price(order.Price.FormatString(8)) + } + } + + switch order.TimeInForce { + case "FOK": + orderReq.OrderType(okexapi.OrderTypeFOK) + case "IOC": + orderReq.OrderType(okexapi.OrderTypeIOC) + default: + orderReq.OrderType(orderType) + } + + reqs = append(reqs, orderReq) + } + + batchReq := e.client.TradeService.NewBatchPlaceOrderRequest() + batchReq.Add(reqs...) + orderHeads, err := batchReq.Do(ctx) + if err != nil { + return nil, err + } + + for idx, orderHead := range orderHeads { + orderID, err := strconv.ParseInt(orderHead.OrderID, 10, 64) + if err != nil { + return createdOrders, err + } + + submitOrder := orders[idx] + createdOrders = append(createdOrders, types.Order{ + SubmitOrder: submitOrder, + Exchange: types.ExchangeOKEx, + OrderID: uint64(orderID), + Status: types.OrderStatusNew, + ExecutedQuantity: fixedpoint.Zero, + IsWorking: true, + CreationTime: types.Time(time.Now()), + UpdateTime: types.Time(time.Now()), + IsMargin: false, + IsIsolated: false, + }) + } + + return createdOrders, nil +} + +func (e *Exchange) QueryOpenOrders(ctx context.Context, symbol string) (orders []types.Order, err error) { + instrumentID := toLocalSymbol(symbol) + req := e.client.TradeService.NewGetPendingOrderRequest().InstrumentType(okexapi.InstrumentTypeSpot).InstrumentID(instrumentID) + orderDetails, err := req.Do(ctx) + if err != nil { + return orders, err + } + + orders, err = toGlobalOrders(orderDetails) + return orders, err +} + +func (e *Exchange) CancelOrders(ctx context.Context, orders ...types.Order) error { + if len(orders) == 0 { + return nil + } + + var reqs []*okexapi.CancelOrderRequest + for _, order := range orders { + if len(order.Symbol) == 0 { + return errors.New("symbol is required for canceling an okex order") + } + + req := e.client.TradeService.NewCancelOrderRequest() + req.InstrumentID(toLocalSymbol(order.Symbol)) + req.OrderID(strconv.FormatUint(order.OrderID, 10)) + if len(order.ClientOrderID) > 0 { + req.ClientOrderID(order.ClientOrderID) + } + reqs = append(reqs, req) + } + + batchReq := e.client.TradeService.NewBatchCancelOrderRequest() + batchReq.Add(reqs...) 
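+ // Note: the cancel requests collected above are not sent one by one; they are
+ // submitted in a single batch call (POST /api/v5/trade/cancel-batch-orders via
+ // BatchCancelOrderRequest.Do), mirroring the batch flow used by SubmitOrders.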
+ _, err := batchReq.Do(ctx) + return err +} + +func (e *Exchange) NewStream() types.Stream { + return NewStream(e.client) +} + +func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) { + if err := marketDataLimiter.Wait(ctx); err != nil { + return nil, err + } + + intervalParam := toLocalInterval(interval.String()) + + req := e.client.MarketDataService.NewCandlesticksRequest(toLocalSymbol(symbol)) + req.Bar(intervalParam) + + if options.StartTime != nil { + req.After(options.StartTime.Unix()) + } + + if options.EndTime != nil { + req.Before(options.EndTime.Unix()) + } + + candles, err := req.Do(ctx) + if err != nil { + return nil, err + } + + var klines []types.KLine + for _, candle := range candles { + klines = append(klines, types.KLine{ + Exchange: types.ExchangeOKEx, + Symbol: symbol, + Interval: interval, + Open: candle.Open, + High: candle.High, + Low: candle.Low, + Close: candle.Close, + Closed: true, + Volume: candle.Volume, + QuoteVolume: candle.VolumeInCurrency, + StartTime: types.Time(candle.Time), + EndTime: types.Time(candle.Time.Add(interval.Duration() - time.Millisecond)), + }) + } + + return klines, nil + +} diff --git a/pkg/exchange/okex/gensymbols.go b/pkg/exchange/okex/gensymbols.go new file mode 100644 index 0000000000..be27065e1a --- /dev/null +++ b/pkg/exchange/okex/gensymbols.go @@ -0,0 +1,52 @@ +//go:build ignore +// +build ignore + +package main + +import ( + "context" + "log" + "os" + "strings" + "text/template" + + "github.com/c9s/bbgo/pkg/exchange/okex/okexapi" +) + +var packageTemplate = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT. +package okex + +var spotSymbolMap = map[string]string{ +{{- range $k, $v := . }} + {{ printf "%q" $k }}: {{ printf "%q" $v }}, +{{- end }} +} + +`)) + +func main() { + ctx := context.Background() + client := okexapi.NewClient() + instruments, err := client.PublicDataService.NewGetInstrumentsRequest().InstrumentType(okexapi.InstrumentTypeSpot).Do(ctx) + if err != nil { + log.Fatal(err) + } + + var data = map[string]string{} + for _, instrument := range instruments { + symbol := strings.ReplaceAll(instrument.InstrumentID, "-", "") + data[symbol] = instrument.InstrumentID + } + + f, err := os.Create("symbols.go") + if err != nil { + log.Fatal(err) + } + + defer f.Close() + + err = packageTemplate.Execute(f, data) + if err != nil { + log.Fatal(err) + } +} diff --git a/pkg/exchange/okex/okexapi/cancel_order_request_accessors.go b/pkg/exchange/okex/okexapi/cancel_order_request_accessors.go new file mode 100644 index 0000000000..aaaf3060ba --- /dev/null +++ b/pkg/exchange/okex/okexapi/cancel_order_request_accessors.go @@ -0,0 +1,76 @@ +// Code generated by "requestgen -type CancelOrderRequest"; DO NOT EDIT. 
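The generated accessors that follow come from requestgen's fluent pattern. As a rough usage sketch (not part of the patch; it assumes an authenticated okexapi.RestClient, and the order ID is a hypothetical placeholder), a cancel request is typically built and serialized like this:

package main

import "github.com/c9s/bbgo/pkg/exchange/okex/okexapi"

func main() {
    client := okexapi.NewClient()
    client.Auth("key", "secret", "passphrase") // placeholder credentials

    // chain the generated setters, then let GetParameters build the request payload
    req := client.TradeService.NewCancelOrderRequest().
        InstrumentID("BTC-USDT").
        OrderID("4130264") // hypothetical order ID

    params, _ := req.GetParameters() // map[instId:BTC-USDT ordId:4130264]
    _ = params
}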
+ +package okexapi + +import ( + "encoding/json" + "fmt" + "net/url" +) + +func (c *CancelOrderRequest) InstrumentID(instrumentID string) *CancelOrderRequest { + c.instrumentID = instrumentID + return c +} + +func (c *CancelOrderRequest) OrderID(orderID string) *CancelOrderRequest { + c.orderID = &orderID + return c +} + +func (c *CancelOrderRequest) ClientOrderID(clientOrderID string) *CancelOrderRequest { + c.clientOrderID = &clientOrderID + return c +} + +func (c *CancelOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + // check instrumentID field -> json key instId + instrumentID := c.instrumentID + + // assign parameter of instrumentID + params["instId"] = instrumentID + + // check orderID field -> json key ordId + if c.orderID != nil { + orderID := *c.orderID + + // assign parameter of orderID + params["ordId"] = orderID + } + + // check clientOrderID field -> json key clOrdId + if c.clientOrderID != nil { + clientOrderID := *c.clientOrderID + + // assign parameter of clientOrderID + params["clOrdId"] = clientOrderID + } + + return params, nil +} + +func (c *CancelOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := c.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +func (c *CancelOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := c.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} diff --git a/pkg/exchange/okex/okexapi/client.go b/pkg/exchange/okex/okexapi/client.go new file mode 100644 index 0000000000..626e841603 --- /dev/null +++ b/pkg/exchange/okex/okexapi/client.go @@ -0,0 +1,406 @@ +package okexapi + +import ( + "bytes" + "crypto/hmac" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strings" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" + "github.com/pkg/errors" +) + +const defaultHTTPTimeout = time.Second * 15 +const RestBaseURL = "https://www.okex.com/" +const PublicWebSocketURL = "wss://ws.okex.com:8443/ws/v5/public" +const PrivateWebSocketURL = "wss://ws.okex.com:8443/ws/v5/private" + +type SideType string + +const ( + SideTypeBuy SideType = "buy" + SideTypeSell SideType = "sell" +) + +type OrderType string + +const ( + OrderTypeMarket OrderType = "market" + OrderTypeLimit OrderType = "limit" + OrderTypePostOnly OrderType = "post_only" + OrderTypeFOK OrderType = "fok" + OrderTypeIOC OrderType = "ioc" +) + +type InstrumentType string + +const ( + InstrumentTypeSpot InstrumentType = "SPOT" + InstrumentTypeSwap InstrumentType = "SWAP" + InstrumentTypeFutures InstrumentType = "FUTURES" + InstrumentTypeOption InstrumentType = "OPTION" +) + +type OrderState string + +const ( + OrderStateCanceled OrderState = "canceled" + OrderStateLive OrderState = "live" + OrderStatePartiallyFilled OrderState = "partially_filled" + OrderStateFilled OrderState = "filled" +) + +type RestClient struct { + BaseURL *url.URL + + client *http.Client + + Key, Secret, Passphrase string + + TradeService *TradeService + PublicDataService *PublicDataService + MarketDataService *MarketDataService +} + +func NewClient() *RestClient { + u, err := url.Parse(RestBaseURL) + if err != nil { + panic(err) + } + + client := &RestClient{ + BaseURL: u, + client: &http.Client{ + Timeout: defaultHTTPTimeout, + }, + } + + client.TradeService 
= &TradeService{client: client} + client.PublicDataService = &PublicDataService{client: client} + client.MarketDataService = &MarketDataService{client: client} + return client +} + +func (c *RestClient) Auth(key, secret, passphrase string) { + c.Key = key + // pragma: allowlist nextline secret + c.Secret = secret + c.Passphrase = passphrase +} + +// NewRequest create new API request. Relative url can be provided in refURL. +func (c *RestClient) newRequest(method, refURL string, params url.Values, body []byte) (*http.Request, error) { + rel, err := url.Parse(refURL) + if err != nil { + return nil, err + } + + if params != nil { + rel.RawQuery = params.Encode() + } + + pathURL := c.BaseURL.ResolveReference(rel) + return http.NewRequest(method, pathURL.String(), bytes.NewReader(body)) +} + +// sendRequest sends the request to the API server and handle the response +func (c *RestClient) sendRequest(req *http.Request) (*util.Response, error) { + resp, err := c.client.Do(req) + if err != nil { + return nil, err + } + + // newResponse reads the response body and return a new Response object + response, err := util.NewResponse(resp) + if err != nil { + return response, err + } + + // Check error, if there is an error, return the ErrorResponse struct type + if response.IsError() { + return response, errors.New(string(response.Body)) + } + + return response, nil +} + +// newAuthenticatedRequest creates new http request for authenticated routes. +func (c *RestClient) newAuthenticatedRequest(method, refURL string, params url.Values, payload interface{}) (*http.Request, error) { + if len(c.Key) == 0 { + return nil, errors.New("empty api key") + } + + if len(c.Secret) == 0 { + return nil, errors.New("empty api secret") + } + + rel, err := url.Parse(refURL) + if err != nil { + return nil, err + } + + if params != nil { + rel.RawQuery = params.Encode() + } + + pathURL := c.BaseURL.ResolveReference(rel) + path := pathURL.Path + if rel.RawQuery != "" { + path += "?" 
+ rel.RawQuery + } + + // set location to UTC so that it outputs "2020-12-08T09:08:57.715Z" + t := time.Now().In(time.UTC) + timestamp := t.Format("2006-01-02T15:04:05.999Z07:00") + + var body []byte + + if payload != nil { + switch v := payload.(type) { + case string: + body = []byte(v) + + case []byte: + body = v + + default: + body, err = json.Marshal(v) + if err != nil { + return nil, err + } + } + } + + signKey := timestamp + strings.ToUpper(method) + path + string(body) + signature := Sign(signKey, c.Secret) + + req, err := http.NewRequest(method, pathURL.String(), bytes.NewReader(body)) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Accept", "application/json") + req.Header.Add("OK-ACCESS-KEY", c.Key) + req.Header.Add("OK-ACCESS-SIGN", signature) + req.Header.Add("OK-ACCESS-TIMESTAMP", timestamp) + req.Header.Add("OK-ACCESS-PASSPHRASE", c.Passphrase) + return req, nil +} + +type BalanceDetail struct { + Currency string `json:"ccy"` + Available fixedpoint.Value `json:"availEq"` + CashBalance fixedpoint.Value `json:"cashBal"` + OrderFrozen fixedpoint.Value `json:"ordFrozen"` + Frozen fixedpoint.Value `json:"frozenBal"` + Equity fixedpoint.Value `json:"eq"` + EquityInUSD fixedpoint.Value `json:"eqUsd"` + UpdateTime types.MillisecondTimestamp `json:"uTime"` + UnrealizedProfitAndLoss fixedpoint.Value `json:"upl"` +} + +type Account struct { + TotalEquityInUSD fixedpoint.Value `json:"totalEq"` + UpdateTime string `json:"uTime"` + Details []BalanceDetail `json:"details"` +} + +func (c *RestClient) AccountBalances() (*Account, error) { + req, err := c.newAuthenticatedRequest("GET", "/api/v5/account/balance", nil, nil) + if err != nil { + return nil, err + } + + response, err := c.sendRequest(req) + if err != nil { + return nil, err + } + + var balanceResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []Account `json:"data"` + } + + if err := response.DecodeJSON(&balanceResponse); err != nil { + return nil, err + } + + if len(balanceResponse.Data) == 0 { + return nil, errors.New("empty account data") + } + + return &balanceResponse.Data[0], nil +} + +type AssetBalance struct { + Currency string `json:"ccy"` + Balance fixedpoint.Value `json:"bal"` + Frozen fixedpoint.Value `json:"frozenBal,omitempty"` + Available fixedpoint.Value `json:"availBal,omitempty"` +} + +type AssetBalanceList []AssetBalance + +func (c *RestClient) AssetBalances() (AssetBalanceList, error) { + req, err := c.newAuthenticatedRequest("GET", "/api/v5/asset/balances", nil, nil) + if err != nil { + return nil, err + } + + response, err := c.sendRequest(req) + if err != nil { + return nil, err + } + + var balanceResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data AssetBalanceList `json:"data"` + } + if err := response.DecodeJSON(&balanceResponse); err != nil { + return nil, err + } + + return balanceResponse.Data, nil +} + +type AssetCurrency struct { + Currency string `json:"ccy"` + Name string `json:"name"` + Chain string `json:"chain"` + CanDeposit bool `json:"canDep"` + CanWithdraw bool `json:"canWd"` + CanInternal bool `json:"canInternal"` + MinWithdrawalFee fixedpoint.Value `json:"minFee"` + MaxWithdrawalFee fixedpoint.Value `json:"maxFee"` + MinWithdrawalThreshold fixedpoint.Value `json:"minWd"` +} + +func (c *RestClient) AssetCurrencies() ([]AssetCurrency, error) { + req, err := c.newAuthenticatedRequest("GET", "/api/v5/asset/currencies", nil, nil) + if err != nil { + return nil, err + } + 
+ response, err := c.sendRequest(req)
+ if err != nil {
+     return nil, err
+ }
+
+ var currencyResponse struct {
+     Code    string          `json:"code"`
+     Message string          `json:"msg"`
+     Data    []AssetCurrency `json:"data"`
+ }
+
+ if err := response.DecodeJSON(&currencyResponse); err != nil {
+     return nil, err
+ }
+
+ return currencyResponse.Data, nil
+}
+
+type MarketTicker struct {
+ InstrumentType string `json:"instType"`
+ InstrumentID   string `json:"instId"`
+
+ // last traded price
+ Last fixedpoint.Value `json:"last"`
+
+ // last traded size
+ LastSize fixedpoint.Value `json:"lastSz"`
+
+ AskPrice fixedpoint.Value `json:"askPx"`
+ AskSize  fixedpoint.Value `json:"askSz"`
+
+ BidPrice fixedpoint.Value `json:"bidPx"`
+ BidSize  fixedpoint.Value `json:"bidSz"`
+
+ Open24H           fixedpoint.Value `json:"open24h"`
+ High24H           fixedpoint.Value `json:"high24H"`
+ Low24H            fixedpoint.Value `json:"low24H"`
+ Volume24H         fixedpoint.Value `json:"vol24h"`
+ VolumeCurrency24H fixedpoint.Value `json:"volCcy24h"`
+
+ // Millisecond timestamp
+ Timestamp types.MillisecondTimestamp `json:"ts"`
+}
+
+func (c *RestClient) MarketTicker(instId string) (*MarketTicker, error) {
+ // SPOT, SWAP, FUTURES, OPTION
+ var params = url.Values{}
+ params.Add("instId", instId)
+
+ req, err := c.newRequest("GET", "/api/v5/market/ticker", params, nil)
+ if err != nil {
+     return nil, err
+ }
+
+ response, err := c.sendRequest(req)
+ if err != nil {
+     return nil, err
+ }
+
+ var tickerResponse struct {
+     Code    string         `json:"code"`
+     Message string         `json:"msg"`
+     Data    []MarketTicker `json:"data"`
+ }
+ if err := response.DecodeJSON(&tickerResponse); err != nil {
+     return nil, err
+ }
+
+ if len(tickerResponse.Data) == 0 {
+     return nil, fmt.Errorf("ticker of %s not found", instId)
+ }
+
+ return &tickerResponse.Data[0], nil
+}
+
+func (c *RestClient) MarketTickers(instType InstrumentType) ([]MarketTicker, error) {
+ // SPOT, SWAP, FUTURES, OPTION
+ var params = url.Values{}
+ params.Add("instType", string(instType))
+
+ req, err := c.newRequest("GET", "/api/v5/market/tickers", params, nil)
+ if err != nil {
+     return nil, err
+ }
+
+ response, err := c.sendRequest(req)
+ if err != nil {
+     return nil, err
+ }
+
+ var tickerResponse struct {
+     Code    string         `json:"code"`
+     Message string         `json:"msg"`
+     Data    []MarketTicker `json:"data"`
+ }
+ if err := response.DecodeJSON(&tickerResponse); err != nil {
+     return nil, err
+ }
+
+ return tickerResponse.Data, nil
+}
+
+func Sign(payload string, secret string) string {
+ var sig = hmac.New(sha256.New, []byte(secret))
+ _, err := sig.Write([]byte(payload))
+ if err != nil {
+     return ""
+ }
+
+ return base64.StdEncoding.EncodeToString(sig.Sum(nil))
+ // return hex.EncodeToString(sig.Sum(nil))
+}
diff --git a/pkg/exchange/okex/okexapi/market.go b/pkg/exchange/okex/okexapi/market.go
new file mode 100644
index 0000000000..b9b46c43f2
--- /dev/null
+++ b/pkg/exchange/okex/okexapi/market.go
@@ -0,0 +1,268 @@
+package okexapi
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "strconv"
+ "time"
+
+ "github.com/c9s/bbgo/pkg/fixedpoint"
+)
+
+type Candle struct {
+ InstrumentID     string
+ Interval         string
+ Time             time.Time
+ Open             fixedpoint.Value
+ High             fixedpoint.Value
+ Low              fixedpoint.Value
+ Close            fixedpoint.Value
+ Volume           fixedpoint.Value
+ VolumeInCurrency fixedpoint.Value
+}
+
+type CandlesticksRequest struct {
+ client *RestClient
+
+ instId string `param:"instId"`
+
+ limit *int `param:"limit"`
+
+ bar *string `param:"bar"`
+
+ after *int64 `param:"after,seconds"`
+
+ before *int64 `param:"before,seconds"`
+}
+
+func (r 
*CandlesticksRequest) After(after int64) *CandlesticksRequest { + r.after = &after + return r +} + +func (r *CandlesticksRequest) Before(before int64) *CandlesticksRequest { + r.before = &before + return r +} + +func (r *CandlesticksRequest) Bar(bar string) *CandlesticksRequest { + r.bar = &bar + return r +} + +func (r *CandlesticksRequest) Limit(limit int) *CandlesticksRequest { + r.limit = &limit + return r +} + +func (r *CandlesticksRequest) InstrumentID(instId string) *CandlesticksRequest { + r.instId = instId + return r +} + +func (r *CandlesticksRequest) Do(ctx context.Context) ([]Candle, error) { + // SPOT, SWAP, FUTURES, OPTION + var params = url.Values{} + params.Add("instId", r.instId) + + if r.bar != nil { + params.Add("bar", *r.bar) + } + + if r.before != nil { + params.Add("before", strconv.FormatInt(*r.before, 10)) + } + + if r.after != nil { + params.Add("after", strconv.FormatInt(*r.after, 10)) + } + + if r.limit != nil { + params.Add("limit", strconv.Itoa(*r.limit)) + } + + req, err := r.client.newRequest("GET", "/api/v5/market/candles", params, nil) + if err != nil { + return nil, err + } + + resp, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + type candleEntry [7]string + var candlesResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []candleEntry `json:"data"` + } + + if err := resp.DecodeJSON(&candlesResponse); err != nil { + return nil, err + } + + var candles []Candle + for _, entry := range candlesResponse.Data { + timestamp, err := strconv.ParseInt(entry[0], 10, 64) + if err != nil { + return candles, err + } + + open, err := fixedpoint.NewFromString(entry[1]) + if err != nil { + return candles, err + } + + high, err := fixedpoint.NewFromString(entry[2]) + if err != nil { + return candles, err + } + + low, err := fixedpoint.NewFromString(entry[3]) + if err != nil { + return candles, err + } + + cls, err := fixedpoint.NewFromString(entry[4]) + if err != nil { + return candles, err + } + + vol, err := fixedpoint.NewFromString(entry[5]) + if err != nil { + return candles, err + } + + volCcy, err := fixedpoint.NewFromString(entry[6]) + if err != nil { + return candles, err + } + + var interval = "1m" + if r.bar != nil { + interval = *r.bar + } + + candles = append(candles, Candle{ + InstrumentID: r.instId, + Interval: interval, + Time: time.Unix(0, timestamp*int64(time.Millisecond)), + Open: open, + High: high, + Low: low, + Close: cls, + Volume: vol, + VolumeInCurrency: volCcy, + }) + } + + return candles, nil +} + +type MarketTickersRequest struct { + client *RestClient + + instType string +} + +func (r *MarketTickersRequest) InstrumentType(instType string) *MarketTickersRequest { + r.instType = instType + return r +} + +func (r *MarketTickersRequest) Do(ctx context.Context) ([]MarketTicker, error) { + // SPOT, SWAP, FUTURES, OPTION + var params = url.Values{} + params.Add("instType", string(r.instType)) + + req, err := r.client.newRequest("GET", "/api/v5/market/tickers", params, nil) + if err != nil { + return nil, err + } + + response, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + var tickerResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []MarketTicker `json:"data"` + } + + if err := response.DecodeJSON(&tickerResponse); err != nil { + return nil, err + } + + return tickerResponse.Data, nil +} + +type MarketTickerRequest struct { + client *RestClient + + instId string +} + +func (r *MarketTickerRequest) InstrumentID(instId string) 
*MarketTickerRequest { + r.instId = instId + return r +} + +func (r *MarketTickerRequest) Do(ctx context.Context) (*MarketTicker, error) { + // SPOT, SWAP, FUTURES, OPTION + var params = url.Values{} + params.Add("instId", r.instId) + + req, err := r.client.newRequest("GET", "/api/v5/market/ticker", params, nil) + if err != nil { + return nil, err + } + + response, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + var tickerResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []MarketTicker `json:"data"` + } + if err := response.DecodeJSON(&tickerResponse); err != nil { + return nil, err + } + + if len(tickerResponse.Data) == 0 { + return nil, fmt.Errorf("ticker of %s not found", r.instId) + } + + return &tickerResponse.Data[0], nil +} + +type MarketDataService struct { + client *RestClient +} + +func (c *MarketDataService) NewMarketTickerRequest(instId string) *MarketTickerRequest { + return &MarketTickerRequest{ + client: c.client, + instId: instId, + } +} + +func (c *MarketDataService) NewMarketTickersRequest(instType string) *MarketTickersRequest { + return &MarketTickersRequest{ + client: c.client, + instType: instType, + } +} + +func (c *MarketDataService) NewCandlesticksRequest(instId string) *CandlesticksRequest { + return &CandlesticksRequest{ + client: c.client, + instId: instId, + } +} diff --git a/pkg/exchange/okex/okexapi/place_order_request_accessors.go b/pkg/exchange/okex/okexapi/place_order_request_accessors.go new file mode 100644 index 0000000000..b272cee113 --- /dev/null +++ b/pkg/exchange/okex/okexapi/place_order_request_accessors.go @@ -0,0 +1,151 @@ +// Code generated by "requestgen -type PlaceOrderRequest"; DO NOT EDIT. + +package okexapi + +import ( + "encoding/json" + "fmt" + "net/url" +) + +func (p *PlaceOrderRequest) InstrumentID(instrumentID string) *PlaceOrderRequest { + p.instrumentID = instrumentID + return p +} + +func (p *PlaceOrderRequest) TradeMode(tradeMode string) *PlaceOrderRequest { + p.tradeMode = tradeMode + return p +} + +func (p *PlaceOrderRequest) ClientOrderID(clientOrderID string) *PlaceOrderRequest { + p.clientOrderID = &clientOrderID + return p +} + +func (p *PlaceOrderRequest) Tag(tag string) *PlaceOrderRequest { + p.tag = &tag + return p +} + +func (p *PlaceOrderRequest) Side(side SideType) *PlaceOrderRequest { + p.side = side + return p +} + +func (p *PlaceOrderRequest) OrderType(orderType OrderType) *PlaceOrderRequest { + p.orderType = orderType + return p +} + +func (p *PlaceOrderRequest) Quantity(quantity string) *PlaceOrderRequest { + p.quantity = quantity + return p +} + +func (p *PlaceOrderRequest) Price(price string) *PlaceOrderRequest { + p.price = &price + return p +} + +func (p *PlaceOrderRequest) GetParameters() (map[string]interface{}, error) { + var params = map[string]interface{}{} + + // check instrumentID field -> json key instId + instrumentID := p.instrumentID + + // assign parameter of instrumentID + params["instId"] = instrumentID + + // check tradeMode field -> json key tdMode + tradeMode := p.tradeMode + + switch tradeMode { + case "cross", "isolated", "cash": + params["tdMode"] = tradeMode + + default: + return params, fmt.Errorf("tdMode value %v is invalid", tradeMode) + + } + + // assign parameter of tradeMode + params["tdMode"] = tradeMode + + // check clientOrderID field -> json key clOrdId + if p.clientOrderID != nil { + clientOrderID := *p.clientOrderID + + // assign parameter of clientOrderID + params["clOrdId"] = clientOrderID + } + + // check tag field 
-> json key tag + if p.tag != nil { + tag := *p.tag + + // assign parameter of tag + params["tag"] = tag + } + + // check side field -> json key side + side := p.side + + switch side { + case "buy", "sell": + params["side"] = side + + default: + return params, fmt.Errorf("side value %v is invalid", side) + + } + + // assign parameter of side + params["side"] = side + + // check orderType field -> json key ordType + orderType := p.orderType + + // assign parameter of orderType + params["ordType"] = orderType + + // check quantity field -> json key sz + quantity := p.quantity + + // assign parameter of quantity + params["sz"] = quantity + + // check price field -> json key px + if p.price != nil { + price := *p.price + + // assign parameter of price + params["px"] = price + } + + return params, nil +} + +func (p *PlaceOrderRequest) GetParametersQuery() (url.Values, error) { + query := url.Values{} + + params, err := p.GetParameters() + if err != nil { + return query, err + } + + for k, v := range params { + query.Add(k, fmt.Sprintf("%v", v)) + } + + return query, nil +} + +func (p *PlaceOrderRequest) GetParametersJSON() ([]byte, error) { + params, err := p.GetParameters() + if err != nil { + return nil, err + } + + return json.Marshal(params) +} diff --git a/pkg/exchange/okex/okexapi/public.go b/pkg/exchange/okex/okexapi/public.go new file mode 100644 index 0000000000..b877fe6372 --- /dev/null +++ b/pkg/exchange/okex/okexapi/public.go @@ -0,0 +1,146 @@ +package okexapi + +import ( + "context" + "net/url" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/pkg/errors" +) + +type PublicDataService struct { + client *RestClient +} + +func (s *PublicDataService) NewGetInstrumentsRequest() *GetInstrumentsRequest { + return &GetInstrumentsRequest{ + client: s.client, + } +} + +func (s *PublicDataService) NewGetFundingRate() *GetFundingRateRequest { + return &GetFundingRateRequest{ + client: s.client, + } +} + +type FundingRate struct { + InstrumentType string `json:"instType"` + InstrumentID string `json:"instId"` + FundingRate fixedpoint.Value `json:"fundingRate"` + NextFundingRate fixedpoint.Value `json:"nextFundingRate"` + FundingTime types.MillisecondTimestamp `json:"fundingTime"` +} + +type GetFundingRateRequest struct { + client *RestClient + + instId string +} + +func (r *GetFundingRateRequest) InstrumentID(instId string) *GetFundingRateRequest { + r.instId = instId + return r +} + +func (r *GetFundingRateRequest) Do(ctx context.Context) (*FundingRate, error) { + // SPOT, SWAP, FUTURES, OPTION + var params = url.Values{} + params.Add("instId", string(r.instId)) + + req, err := r.client.newRequest("GET", "/api/v5/public/funding-rate", params, nil) + if err != nil { + return nil, err + } + + response, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []FundingRate `json:"data"` + } + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + + if len(apiResponse.Data) == 0 { + return nil, errors.New("empty funding rate data") + } + + return &apiResponse.Data[0], nil +} + +type Instrument struct { + InstrumentType string `json:"instType"` + InstrumentID string `json:"instId"` + BaseCurrency string `json:"baseCcy"` + QuoteCurrency string `json:"quoteCcy"` + SettleCurrency string `json:"settleCcy"` + ContractValue string `json:"ctVal"` + ContractMultiplier string `json:"ctMult"` + ContractValueCurrency string 
`json:"ctValCcy"` + ListTime types.MillisecondTimestamp `json:"listTime"` + ExpiryTime types.MillisecondTimestamp `json:"expTime"` + TickSize fixedpoint.Value `json:"tickSz"` + LotSize fixedpoint.Value `json:"lotSz"` + + // MinSize = min order size + MinSize fixedpoint.Value `json:"minSz"` + + // instrument status + State string `json:"state"` +} + +type GetInstrumentsRequest struct { + client *RestClient + + instType InstrumentType + + instId *string +} + +func (r *GetInstrumentsRequest) InstrumentType(instType InstrumentType) *GetInstrumentsRequest { + r.instType = instType + return r +} + +func (r *GetInstrumentsRequest) InstrumentID(instId string) *GetInstrumentsRequest { + r.instId = &instId + return r +} + +func (r *GetInstrumentsRequest) Do(ctx context.Context) ([]Instrument, error) { + // SPOT, SWAP, FUTURES, OPTION + var params = url.Values{} + params.Add("instType", string(r.instType)) + + if r.instId != nil { + params.Add("instId", *r.instId) + } + + req, err := r.client.newRequest("GET", "/api/v5/public/instruments", params, nil) + if err != nil { + return nil, err + } + + response, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + var apiResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []Instrument `json:"data"` + } + if err := response.DecodeJSON(&apiResponse); err != nil { + return nil, err + } + + return apiResponse.Data, nil +} diff --git a/pkg/exchange/okex/okexapi/trade.go b/pkg/exchange/okex/okexapi/trade.go new file mode 100644 index 0000000000..7adc37c986 --- /dev/null +++ b/pkg/exchange/okex/okexapi/trade.go @@ -0,0 +1,520 @@ +package okexapi + +import ( + "context" + "net/url" + "strings" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/pkg/errors" +) + +type TradeService struct { + client *RestClient +} + +type OrderResponse struct { + OrderID string `json:"ordId"` + ClientOrderID string `json:"clOrdId"` + Tag string `json:"tag"` + Code string `json:"sCode"` + Message string `json:"sMsg"` +} + +func (c *TradeService) NewPlaceOrderRequest() *PlaceOrderRequest { + return &PlaceOrderRequest{ + client: c.client, + } +} + +func (c *TradeService) NewBatchPlaceOrderRequest() *BatchPlaceOrderRequest { + return &BatchPlaceOrderRequest{ + client: c.client, + } +} + +func (c *TradeService) NewCancelOrderRequest() *CancelOrderRequest { + return &CancelOrderRequest{ + client: c.client, + } +} + +func (c *TradeService) NewBatchCancelOrderRequest() *BatchCancelOrderRequest { + return &BatchCancelOrderRequest{ + client: c.client, + } +} + +func (c *TradeService) NewGetOrderDetailsRequest() *GetOrderDetailsRequest { + return &GetOrderDetailsRequest{ + client: c.client, + } +} + +func (c *TradeService) NewGetPendingOrderRequest() *GetPendingOrderRequest { + return &GetPendingOrderRequest{ + client: c.client, + } +} + +func (c *TradeService) NewGetTransactionDetailsRequest() *GetTransactionDetailsRequest { + return &GetTransactionDetailsRequest{ + client: c.client, + } +} + +//go:generate requestgen -type PlaceOrderRequest +type PlaceOrderRequest struct { + client *RestClient + + instrumentID string `param:"instId"` + + // tdMode + // margin mode: "cross", "isolated" + // non-margin mode cash + tradeMode string `param:"tdMode" validValues:"cross,isolated,cash"` + + // A combination of case-sensitive alphanumerics, all numbers, or all letters of up to 32 characters. 
+ clientOrderID *string `param:"clOrdId"`
+
+ // A combination of case-sensitive alphanumerics, all numbers, or all letters of up to 8 characters.
+ tag *string `param:"tag"`
+
+ // "buy" or "sell"
+ side SideType `param:"side" validValues:"buy,sell"`
+
+ orderType OrderType `param:"ordType"`
+
+ quantity string `param:"sz"`
+
+ // price
+ price *string `param:"px"`
+}
+
+func (r *PlaceOrderRequest) Parameters() map[string]interface{} {
+ params, _ := r.GetParameters()
+ return params
+}
+
+func (r *PlaceOrderRequest) Do(ctx context.Context) (*OrderResponse, error) {
+ payload := r.Parameters()
+ req, err := r.client.newAuthenticatedRequest("POST", "/api/v5/trade/order", nil, payload)
+ if err != nil {
+     return nil, err
+ }
+
+ response, err := r.client.sendRequest(req)
+ if err != nil {
+     return nil, err
+ }
+
+ var orderResponse struct {
+     Code    string          `json:"code"`
+     Message string          `json:"msg"`
+     Data    []OrderResponse `json:"data"`
+ }
+ if err := response.DecodeJSON(&orderResponse); err != nil {
+     return nil, err
+ }
+
+ if len(orderResponse.Data) == 0 {
+     return nil, errors.New("order create error")
+ }
+
+ return &orderResponse.Data[0], nil
+}
+
+//go:generate requestgen -type CancelOrderRequest
+type CancelOrderRequest struct {
+ client *RestClient
+
+ instrumentID  string  `param:"instId"`
+ orderID       *string `param:"ordId"`
+ clientOrderID *string `param:"clOrdId"`
+}
+
+func (r *CancelOrderRequest) Parameters() map[string]interface{} {
+ payload, _ := r.GetParameters()
+ return payload
+}
+
+func (r *CancelOrderRequest) Do(ctx context.Context) ([]OrderResponse, error) {
+ payload, err := r.GetParameters()
+ if err != nil {
+     return nil, err
+ }
+
+ // at least one of orderID and clientOrderID must be set to cancel an order
+ if r.orderID == nil && r.clientOrderID == nil {
+     return nil, errors.New("either orderID or clientOrderID is required for canceling order")
+ }
+
+ req, err := r.client.newAuthenticatedRequest("POST", "/api/v5/trade/cancel-order", nil, payload)
+ if err != nil {
+     return nil, err
+ }
+
+ response, err := r.client.sendRequest(req)
+ if err != nil {
+     return nil, err
+ }
+
+ var orderResponse struct {
+     Code    string          `json:"code"`
+     Message string          `json:"msg"`
+     Data    []OrderResponse `json:"data"`
+ }
+ if err := response.DecodeJSON(&orderResponse); err != nil {
+     return nil, err
+ }
+
+ return orderResponse.Data, nil
+}
+
+type BatchCancelOrderRequest struct {
+ client *RestClient
+
+ reqs []*CancelOrderRequest
+}
+
+func (r *BatchCancelOrderRequest) Add(reqs ...*CancelOrderRequest) *BatchCancelOrderRequest {
+ r.reqs = append(r.reqs, reqs...)
+ return r
+}
+
+func (r *BatchCancelOrderRequest) Do(ctx context.Context) ([]OrderResponse, error) {
+ var parameterList []map[string]interface{}
+
+ for _, req := range r.reqs {
+     params := req.Parameters()
+     parameterList = append(parameterList, params)
+ }
+
+ req, err := r.client.newAuthenticatedRequest("POST", "/api/v5/trade/cancel-batch-orders", nil, parameterList)
+ if err != nil {
+     return nil, err
+ }
+
+ response, err := r.client.sendRequest(req)
+ if err != nil {
+     return nil, err
+ }
+
+ var orderResponse struct {
+     Code    string          `json:"code"`
+     Message string          `json:"msg"`
+     Data    []OrderResponse `json:"data"`
+ }
+ if err := response.DecodeJSON(&orderResponse); err != nil {
+     return nil, err
+ }
+
+ return orderResponse.Data, nil
+}
+
+type BatchPlaceOrderRequest struct {
+ client *RestClient
+
+ reqs []*PlaceOrderRequest
+}
+
+func (r *BatchPlaceOrderRequest) Add(reqs ...*PlaceOrderRequest) *BatchPlaceOrderRequest {
+ r.reqs = append(r.reqs, reqs...)
+ return r +} + +func (r *BatchPlaceOrderRequest) Do(ctx context.Context) ([]OrderResponse, error) { + var parameterList []map[string]interface{} + + for _, req := range r.reqs { + params := req.Parameters() + parameterList = append(parameterList, params) + } + + req, err := r.client.newAuthenticatedRequest("POST", "/api/v5/trade/batch-orders", nil, parameterList) + if err != nil { + return nil, err + } + + response, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + var orderResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []OrderResponse `json:"data"` + } + if err := response.DecodeJSON(&orderResponse); err != nil { + return nil, err + } + + return orderResponse.Data, nil +} + +type OrderDetails struct { + InstrumentType string `json:"instType"` + InstrumentID string `json:"instId"` + Tag string `json:"tag"` + Price fixedpoint.Value `json:"px"` + Quantity fixedpoint.Value `json:"sz"` + + OrderID string `json:"ordId"` + ClientOrderID string `json:"clOrdId"` + OrderType OrderType `json:"ordType"` + Side SideType `json:"side"` + + // Accumulated fill quantity + FilledQuantity fixedpoint.Value `json:"accFillSz"` + + FeeCurrency string `json:"feeCcy"` + Fee fixedpoint.Value `json:"fee"` + + // trade related fields + LastTradeID string `json:"tradeId,omitempty"` + LastFilledPrice fixedpoint.Value `json:"fillPx"` + LastFilledQuantity fixedpoint.Value `json:"fillSz"` + LastFilledTime types.MillisecondTimestamp `json:"fillTime"` + LastFilledFee fixedpoint.Value `json:"fillFee"` + LastFilledFeeCurrency string `json:"fillFeeCcy"` + + // ExecutionType = liquidity (M = maker or T = taker) + ExecutionType string `json:"execType"` + + // Average filled price. If none is filled, it will return 0. + AveragePrice fixedpoint.Value `json:"avgPx"` + + // Currency = Margin currency + // Only applicable to cross MARGIN orders in Single-currency margin. + Currency string `json:"ccy"` + + // Leverage = from 0.01 to 125. 
+ // Only applicable to MARGIN/FUTURES/SWAP + Leverage fixedpoint.Value `json:"lever"` + + RebateCurrency string `json:"rebateCcy"` + Rebate fixedpoint.Value `json:"rebate"` + + PnL fixedpoint.Value `json:"pnl"` + + UpdateTime types.MillisecondTimestamp `json:"uTime"` + CreationTime types.MillisecondTimestamp `json:"cTime"` + + State OrderState `json:"state"` +} + +type GetOrderDetailsRequest struct { + client *RestClient + + instId string + ordId *string + clOrdId *string +} + +func (r *GetOrderDetailsRequest) InstrumentID(instId string) *GetOrderDetailsRequest { + r.instId = instId + return r +} + +func (r *GetOrderDetailsRequest) OrderID(orderID string) *GetOrderDetailsRequest { + r.ordId = &orderID + return r +} + +func (r *GetOrderDetailsRequest) ClientOrderID(clientOrderID string) *GetOrderDetailsRequest { + r.clOrdId = &clientOrderID + return r +} + +func (r *GetOrderDetailsRequest) QueryParameters() url.Values { + var values = url.Values{} + + values.Add("instId", r.instId) + + if r.ordId != nil { + values.Add("ordId", *r.ordId) + } else if r.clOrdId != nil { + values.Add("clOrdId", *r.clOrdId) + } + + return values +} + +func (r *GetOrderDetailsRequest) Do(ctx context.Context) (*OrderDetails, error) { + params := r.QueryParameters() + req, err := r.client.newAuthenticatedRequest("GET", "/api/v5/trade/order", params, nil) + if err != nil { + return nil, err + } + + response, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + var orderResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []OrderDetails `json:"data"` + } + if err := response.DecodeJSON(&orderResponse); err != nil { + return nil, err + } + + if len(orderResponse.Data) == 0 { + return nil, errors.New("order create error") + } + + return &orderResponse.Data[0], nil +} + +type GetPendingOrderRequest struct { + client *RestClient + + instId *string + + instType *InstrumentType + + orderTypes []string + + state *OrderState +} + +func (r *GetPendingOrderRequest) InstrumentID(instId string) *GetPendingOrderRequest { + r.instId = &instId + return r +} + +func (r *GetPendingOrderRequest) InstrumentType(instType InstrumentType) *GetPendingOrderRequest { + r.instType = &instType + return r +} + +func (r *GetPendingOrderRequest) State(state OrderState) *GetPendingOrderRequest { + r.state = &state + return r +} + +func (r *GetPendingOrderRequest) OrderTypes(orderTypes []string) *GetPendingOrderRequest { + r.orderTypes = orderTypes + return r +} + +func (r *GetPendingOrderRequest) AddOrderTypes(orderTypes ...string) *GetPendingOrderRequest { + r.orderTypes = append(r.orderTypes, orderTypes...) 
+ return r +} + +func (r *GetPendingOrderRequest) Parameters() map[string]interface{} { + var payload = map[string]interface{}{} + + if r.instId != nil { + payload["instId"] = r.instId + } + + if r.instType != nil { + payload["instType"] = r.instType + } + + if r.state != nil { + payload["state"] = r.state + } + + if len(r.orderTypes) > 0 { + payload["ordType"] = strings.Join(r.orderTypes, ",") + } + + return payload +} + +func (r *GetPendingOrderRequest) Do(ctx context.Context) ([]OrderDetails, error) { + payload := r.Parameters() + req, err := r.client.newAuthenticatedRequest("GET", "/api/v5/trade/orders-pending", nil, payload) + if err != nil { + return nil, err + } + + response, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + var orderResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []OrderDetails `json:"data"` + } + if err := response.DecodeJSON(&orderResponse); err != nil { + return nil, err + } + + return orderResponse.Data, nil +} + +type GetTransactionDetailsRequest struct { + client *RestClient + + instType *InstrumentType + + instId *string + + ordId *string +} + +func (r *GetTransactionDetailsRequest) InstrumentType(instType InstrumentType) *GetTransactionDetailsRequest { + r.instType = &instType + return r +} + +func (r *GetTransactionDetailsRequest) InstrumentID(instId string) *GetTransactionDetailsRequest { + r.instId = &instId + return r +} + +func (r *GetTransactionDetailsRequest) OrderID(orderID string) *GetTransactionDetailsRequest { + r.ordId = &orderID + return r +} + +func (r *GetTransactionDetailsRequest) Parameters() map[string]interface{} { + var payload = map[string]interface{}{} + + if r.instType != nil { + payload["instType"] = r.instType + } + + if r.instId != nil { + payload["instId"] = r.instId + } + + if r.ordId != nil { + payload["ordId"] = r.ordId + } + + return payload +} + +func (r *GetTransactionDetailsRequest) Do(ctx context.Context) ([]OrderDetails, error) { + payload := r.Parameters() + req, err := r.client.newAuthenticatedRequest("GET", "/api/v5/trade/fills", nil, payload) + if err != nil { + return nil, err + } + + response, err := r.client.sendRequest(req) + if err != nil { + return nil, err + } + + var orderResponse struct { + Code string `json:"code"` + Message string `json:"msg"` + Data []OrderDetails `json:"data"` + } + if err := response.DecodeJSON(&orderResponse); err != nil { + return nil, err + } + + return orderResponse.Data, nil +} diff --git a/pkg/exchange/okex/parse.go b/pkg/exchange/okex/parse.go new file mode 100644 index 0000000000..6b9c019ed1 --- /dev/null +++ b/pkg/exchange/okex/parse.go @@ -0,0 +1,351 @@ +package okex + +import ( + "encoding/json" + "errors" + "fmt" + "strconv" + "strings" + "time" + + "github.com/valyala/fastjson" + + "github.com/c9s/bbgo/pkg/exchange/okex/okexapi" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func parseWebSocketEvent(str []byte) (interface{}, error) { + v, err := fastjson.ParseBytes(str) + if err != nil { + return nil, err + } + + if v.Exists("event") { + return parseEvent(v) + } + + if v.Exists("data") { + return parseData(v) + } + + return nil, nil +} + +type WebSocketEvent struct { + Event string `json:"event"` + Code string `json:"code,omitempty"` + Message string `json:"msg,omitempty"` + Arg interface{} `json:"arg,omitempty"` +} + +func parseEvent(v *fastjson.Value) (*WebSocketEvent, error) { + // event could be "subscribe", "unsubscribe" or "error" + event := string(v.GetStringBytes("event")) + 
code := string(v.GetStringBytes("code"))
+ message := string(v.GetStringBytes("msg"))
+ arg := v.GetObject("arg")
+ return &WebSocketEvent{
+     Event:   event,
+     Code:    code,
+     Message: message,
+     Arg:     arg,
+ }, nil
+}
+
+type BookEvent struct {
+ InstrumentID         string
+ Symbol               string
+ Action               string
+ Bids                 []BookEntry
+ Asks                 []BookEntry
+ MillisecondTimestamp int64
+ Checksum             int
+ channel              string
+}
+
+func (data *BookEvent) BookTicker() types.BookTicker {
+ var askBookData BookEntry = data.Asks[0]
+ var bidBookData BookEntry = data.Bids[0]
+
+ return types.BookTicker{
+     Symbol:   data.Symbol,
+     Buy:      bidBookData.Price,
+     BuySize:  bidBookData.Volume,
+     Sell:     askBookData.Price,
+     SellSize: askBookData.Volume,
+ }
+}
+
+func (data *BookEvent) Book() types.SliceOrderBook {
+ book := types.SliceOrderBook{
+     Symbol: data.Symbol,
+ }
+
+ for _, bid := range data.Bids {
+     book.Bids = append(book.Bids, types.PriceVolume{Price: bid.Price, Volume: bid.Volume})
+ }
+
+ for _, ask := range data.Asks {
+     book.Asks = append(book.Asks, types.PriceVolume{Price: ask.Price, Volume: ask.Volume})
+ }
+
+ return book
+}
+
+type BookEntry struct {
+ Price         fixedpoint.Value
+ Volume        fixedpoint.Value
+ NumLiquidated int
+ NumOrders     int
+}
+
+func parseBookEntry(v *fastjson.Value) (*BookEntry, error) {
+ arr, err := v.Array()
+ if err != nil {
+     return nil, err
+ }
+
+ if len(arr) < 4 {
+     return nil, fmt.Errorf("unexpected book entry size: %d", len(arr))
+ }
+
+ price := fixedpoint.Must(fixedpoint.NewFromString(string(arr[0].GetStringBytes())))
+ volume := fixedpoint.Must(fixedpoint.NewFromString(string(arr[1].GetStringBytes())))
+ numLiquidated, err := strconv.Atoi(string(arr[2].GetStringBytes()))
+ if err != nil {
+     return nil, err
+ }
+
+ numOrders, err := strconv.Atoi(string(arr[3].GetStringBytes()))
+ if err != nil {
+     return nil, err
+ }
+
+ return &BookEntry{
+     Price:         price,
+     Volume:        volume,
+     NumLiquidated: numLiquidated,
+     NumOrders:     numOrders,
+ }, nil
+}
+
+func parseBookData(v *fastjson.Value) (*BookEvent, error) {
+ instrumentId := string(v.GetStringBytes("arg", "instId"))
+ data := v.GetArray("data")
+ if len(data) == 0 {
+     return nil, errors.New("empty data payload")
+ }
+
+ // "snapshot" or "update"
+ action := string(v.GetStringBytes("action"))
+
+ millisecondTimestamp, err := strconv.ParseInt(string(data[0].GetStringBytes("ts")), 10, 64)
+ if err != nil {
+     return nil, err
+ }
+
+ checksum := data[0].GetInt("checksum")
+
+ var asks []BookEntry
+ var bids []BookEntry
+
+ for _, v := range data[0].GetArray("asks") {
+     entry, err := parseBookEntry(v)
+     if err != nil {
+         return nil, err
+     }
+     asks = append(asks, *entry)
+ }
+
+ for _, v := range data[0].GetArray("bids") {
+     entry, err := parseBookEntry(v)
+     if err != nil {
+         return nil, err
+     }
+     bids = append(bids, *entry)
+ }
+
+ return &BookEvent{
+     InstrumentID:         instrumentId,
+     Symbol:               toGlobalSymbol(instrumentId),
+     Action:               action,
+     Bids:                 bids,
+     Asks:                 asks,
+     Checksum:             checksum,
+     MillisecondTimestamp: millisecondTimestamp,
+ }, nil
+}
+
+type Candle struct {
+ Channel      string
+ InstrumentID string
+ Symbol       string
+ Interval     string
+ Open         fixedpoint.Value
+ High         fixedpoint.Value
+ Low          fixedpoint.Value
+ Close        fixedpoint.Value
+
+ // Trading volume, with a unit of contract.
+ // If it is a derivatives contract, the value is the number of contracts.
+ // If it is SPOT/MARGIN, the value is the amount of trading currency.
+ Volume fixedpoint.Value
+
+ // Trading volume, with a unit of currency.
+ // If it is a derivatives contract, the value is the number of settlement currency. + // If it is SPOT/MARGIN, the value is the number of quote currency. + VolumeInCurrency fixedpoint.Value + + MillisecondTimestamp int64 + + StartTime time.Time +} + +func (c *Candle) KLine() types.KLine { + interval := types.Interval(c.Interval) + endTime := c.StartTime.Add(interval.Duration() - 1*time.Millisecond) + return types.KLine{ + Exchange: types.ExchangeOKEx, + Interval: interval, + Open: c.Open, + High: c.High, + Low: c.Low, + Close: c.Close, + Volume: c.Volume, + QuoteVolume: c.VolumeInCurrency, + StartTime: types.Time(c.StartTime), + EndTime: types.Time(endTime), + } +} + +func parseCandle(channel string, v *fastjson.Value) (*Candle, error) { + instrumentID := string(v.GetStringBytes("arg", "instId")) + data, err := v.Get("data").Array() + if err != nil { + return nil, err + } + + if len(data) == 0 { + return nil, errors.New("candle data is empty") + } + + arr, err := data[0].Array() + if err != nil { + return nil, err + } + + if len(arr) < 7 { + return nil, fmt.Errorf("unexpected candle data length: %d", len(arr)) + } + + interval := strings.ToLower(strings.TrimPrefix(channel, "candle")) + + timestamp, err := strconv.ParseInt(string(arr[0].GetStringBytes()), 10, 64) + if err != nil { + return nil, err + } + + open, err := fixedpoint.NewFromString(string(arr[1].GetStringBytes())) + if err != nil { + return nil, err + } + + high, err := fixedpoint.NewFromString(string(arr[2].GetStringBytes())) + if err != nil { + return nil, err + } + + low, err := fixedpoint.NewFromString(string(arr[3].GetStringBytes())) + if err != nil { + return nil, err + } + + cls, err := fixedpoint.NewFromString(string(arr[4].GetStringBytes())) + if err != nil { + return nil, err + } + + vol, err := fixedpoint.NewFromString(string(arr[5].GetStringBytes())) + if err != nil { + return nil, err + } + + volCurrency, err := fixedpoint.NewFromString(string(arr[6].GetStringBytes())) + if err != nil { + return nil, err + } + + candleTime := time.Unix(0, timestamp*int64(time.Millisecond)) + return &Candle{ + Channel: channel, + InstrumentID: instrumentID, + Symbol: toGlobalSymbol(instrumentID), + Interval: interval, + Open: open, + High: high, + Low: low, + Close: cls, + Volume: vol, + VolumeInCurrency: volCurrency, + MillisecondTimestamp: timestamp, + StartTime: candleTime, + }, nil +} + +func parseAccount(v *fastjson.Value) (*okexapi.Account, error) { + data := v.Get("data").MarshalTo(nil) + + var accounts []okexapi.Account + err := json.Unmarshal(data, &accounts) + if err != nil { + return nil, err + } + + if len(accounts) == 0 { + return nil, errors.New("empty account data") + } + + return &accounts[0], nil +} + +func parseOrder(v *fastjson.Value) ([]okexapi.OrderDetails, error) { + data := v.Get("data").MarshalTo(nil) + + var orderDetails []okexapi.OrderDetails + err := json.Unmarshal(data, &orderDetails) + if err != nil { + return nil, err + } + + return orderDetails, nil +} + +func parseData(v *fastjson.Value) (interface{}, error) { + + channel := string(v.GetStringBytes("arg", "channel")) + + switch channel { + case "books5": + data, err := parseBookData(v) + data.channel = channel + return data, err + case "books": + data, err := parseBookData(v) + data.channel = channel + return data, err + case "account": + return parseAccount(v) + case "orders": + return parseOrder(v) + default: + if strings.HasPrefix(channel, "candle") { + data, err := parseCandle(channel, v) + return data, err + } + + } + + return nil, nil +} diff 
--git a/pkg/exchange/okex/stream.go b/pkg/exchange/okex/stream.go new file mode 100644 index 0000000000..348f486540 --- /dev/null +++ b/pkg/exchange/okex/stream.go @@ -0,0 +1,223 @@ +package okex + +import ( + "context" + "strconv" + "time" + + "github.com/c9s/bbgo/pkg/exchange/okex/okexapi" + "github.com/c9s/bbgo/pkg/types" +) + +type WebsocketOp struct { + Op string `json:"op"` + Args interface{} `json:"args"` +} + +type WebsocketLogin struct { + Key string `json:"apiKey"` + Passphrase string `json:"passphrase"` + Timestamp string `json:"timestamp"` + Sign string `json:"sign"` +} + +//go:generate callbackgen -type Stream -interface +type Stream struct { + types.StandardStream + + client *okexapi.RestClient + + // public callbacks + candleEventCallbacks []func(candle Candle) + bookEventCallbacks []func(book BookEvent) + eventCallbacks []func(event WebSocketEvent) + accountEventCallbacks []func(account okexapi.Account) + orderDetailsEventCallbacks []func(orderDetails []okexapi.OrderDetails) + + lastCandle map[CandleKey]Candle +} + +type CandleKey struct { + InstrumentID string + Channel string +} + +func NewStream(client *okexapi.RestClient) *Stream { + stream := &Stream{ + client: client, + StandardStream: types.NewStandardStream(), + lastCandle: make(map[CandleKey]Candle), + } + + stream.SetParser(parseWebSocketEvent) + stream.SetDispatcher(stream.dispatchEvent) + stream.SetEndpointCreator(stream.createEndpoint) + + stream.OnCandleEvent(stream.handleCandleEvent) + stream.OnBookEvent(stream.handleBookEvent) + stream.OnAccountEvent(stream.handleAccountEvent) + stream.OnOrderDetailsEvent(stream.handleOrderDetailsEvent) + stream.OnEvent(stream.handleEvent) + stream.OnConnect(stream.handleConnect) + return stream +} + +func (s *Stream) handleConnect() { + if s.PublicOnly { + var subs []WebsocketSubscription + for _, subscription := range s.Subscriptions { + sub, err := convertSubscription(subscription) + if err != nil { + log.WithError(err).Errorf("subscription convert error") + continue + } + + subs = append(subs, sub) + } + if len(subs) == 0 { + return + } + + log.Infof("subscribing channels: %+v", subs) + err := s.Conn.WriteJSON(WebsocketOp{ + Op: "subscribe", + Args: subs, + }) + + if err != nil { + log.WithError(err).Error("subscribe error") + } + } else { + // login as private channel + // sign example: + // sign=CryptoJS.enc.Base64.Stringify(CryptoJS.HmacSHA256(timestamp +'GET'+'/users/self/verify', secretKey)) + msTimestamp := strconv.FormatFloat(float64(time.Now().UnixNano())/float64(time.Second), 'f', -1, 64) + payload := msTimestamp + "GET" + "/users/self/verify" + sign := okexapi.Sign(payload, s.client.Secret) + op := WebsocketOp{ + Op: "login", + Args: []WebsocketLogin{ + { + Key: s.client.Key, + Passphrase: s.client.Passphrase, + Timestamp: msTimestamp, + Sign: sign, + }, + }, + } + + log.Infof("sending okex login request") + err := s.Conn.WriteJSON(op) + if err != nil { + log.WithError(err).Errorf("can not send login message") + } + } +} + +func (s *Stream) handleEvent(event WebSocketEvent) { + switch event.Event { + case "login": + if event.Code == "0" { + var subs = []WebsocketSubscription{ + {Channel: "account"}, + {Channel: "orders", InstrumentType: string(okexapi.InstrumentTypeSpot)}, + } + + log.Infof("subscribing private channels: %+v", subs) + err := s.Conn.WriteJSON(WebsocketOp{ + Op: "subscribe", + Args: subs, + }) + + if err != nil { + log.WithError(err).Error("private channel subscribe error") + } + } + } +} + +func (s *Stream) handleOrderDetailsEvent(orderDetails 
[]okexapi.OrderDetails) { + detailTrades, detailOrders := segmentOrderDetails(orderDetails) + + trades, err := toGlobalTrades(detailTrades) + if err != nil { + log.WithError(err).Errorf("error converting order details into trades") + } else { + for _, trade := range trades { + s.EmitTradeUpdate(trade) + } + } + + orders, err := toGlobalOrders(detailOrders) + if err != nil { + log.WithError(err).Errorf("error converting order details into orders") + } else { + for _, order := range orders { + s.EmitOrderUpdate(order) + } + } +} + +func (s *Stream) handleAccountEvent(account okexapi.Account) { + balances := toGlobalBalance(&account) + s.EmitBalanceSnapshot(balances) +} + +func (s *Stream) handleBookEvent(data BookEvent) { + book := data.Book() + switch data.Action { + case "snapshot": + s.EmitBookSnapshot(book) + case "update": + s.EmitBookUpdate(book) + } +} + +func (s *Stream) handleCandleEvent(candle Candle) { + key := CandleKey{Channel: candle.Channel, InstrumentID: candle.InstrumentID} + kline := candle.KLine() + + // check if we need to close previous kline + lastCandle, ok := s.lastCandle[key] + if ok && candle.StartTime.After(lastCandle.StartTime) { + lastKline := lastCandle.KLine() + lastKline.Closed = true + s.EmitKLineClosed(lastKline) + } + + s.EmitKLine(kline) + s.lastCandle[key] = candle +} + +func (s *Stream) createEndpoint(ctx context.Context) (string, error) { + var url string + if s.PublicOnly { + url = okexapi.PublicWebSocketURL + } else { + url = okexapi.PrivateWebSocketURL + } + return url, nil +} + +func (s *Stream) dispatchEvent(e interface{}) { + switch et := e.(type) { + case *WebSocketEvent: + s.EmitEvent(*et) + + case *BookEvent: + // there's "books" for 400 depth and books5 for 5 depth + if et.channel != "books5" { + s.EmitBookEvent(*et) + } + s.EmitBookTickerUpdate(et.BookTicker()) + case *Candle: + s.EmitCandleEvent(*et) + + case *okexapi.Account: + s.EmitAccountEvent(*et) + + case []okexapi.OrderDetails: + s.EmitOrderDetailsEvent(et) + + } +} diff --git a/pkg/exchange/okex/stream_callbacks.go b/pkg/exchange/okex/stream_callbacks.go new file mode 100644 index 0000000000..6fb6e72313 --- /dev/null +++ b/pkg/exchange/okex/stream_callbacks.go @@ -0,0 +1,69 @@ +// Code generated by "callbackgen -type Stream -interface"; DO NOT EDIT. 
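+// The generated code below follows the callbackgen pattern: for each callback
+// slice declared on Stream there is an OnX method that registers a handler and
+// an EmitX method that fans the event out to every registered handler.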
+ +package okex + +import ( + "github.com/c9s/bbgo/pkg/exchange/okex/okexapi" +) + +func (s *Stream) OnCandleEvent(cb func(candle Candle)) { + s.candleEventCallbacks = append(s.candleEventCallbacks, cb) +} + +func (s *Stream) EmitCandleEvent(candle Candle) { + for _, cb := range s.candleEventCallbacks { + cb(candle) + } +} + +func (s *Stream) OnBookEvent(cb func(book BookEvent)) { + s.bookEventCallbacks = append(s.bookEventCallbacks, cb) +} + +func (s *Stream) EmitBookEvent(book BookEvent) { + for _, cb := range s.bookEventCallbacks { + cb(book) + } +} + +func (s *Stream) OnEvent(cb func(event WebSocketEvent)) { + s.eventCallbacks = append(s.eventCallbacks, cb) +} + +func (s *Stream) EmitEvent(event WebSocketEvent) { + for _, cb := range s.eventCallbacks { + cb(event) + } +} + +func (s *Stream) OnAccountEvent(cb func(account okexapi.Account)) { + s.accountEventCallbacks = append(s.accountEventCallbacks, cb) +} + +func (s *Stream) EmitAccountEvent(account okexapi.Account) { + for _, cb := range s.accountEventCallbacks { + cb(account) + } +} + +func (s *Stream) OnOrderDetailsEvent(cb func(orderDetails []okexapi.OrderDetails)) { + s.orderDetailsEventCallbacks = append(s.orderDetailsEventCallbacks, cb) +} + +func (s *Stream) EmitOrderDetailsEvent(orderDetails []okexapi.OrderDetails) { + for _, cb := range s.orderDetailsEventCallbacks { + cb(orderDetails) + } +} + +type StreamEventHub interface { + OnCandleEvent(cb func(candle Candle)) + + OnBookEvent(cb func(book BookEvent)) + + OnEvent(cb func(event WebSocketEvent)) + + OnAccountEvent(cb func(account okexapi.Account)) + + OnOrderDetailsEvent(cb func(orderDetails []okexapi.OrderDetails)) +} diff --git a/pkg/exchange/okex/symbols.go b/pkg/exchange/okex/symbols.go new file mode 100644 index 0000000000..dfbff11566 --- /dev/null +++ b/pkg/exchange/okex/symbols.go @@ -0,0 +1,516 @@ +// Code generated by go generate; DO NOT EDIT. 
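+// spotSymbolMap maps the normalized spot symbols used by bbgo (e.g. "BTCUSDT")
+// to OKEx instrument IDs (e.g. "BTC-USDT").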
+package okex + +var spotSymbolMap = map[string]string{ + "1INCHETH": "1INCH-ETH", + "1INCHUSDT": "1INCH-USDT", + "AACUSDT": "AAC-USDT", + "AAVEBTC": "AAVE-BTC", + "AAVEETH": "AAVE-ETH", + "AAVEUSDT": "AAVE-USDT", + "ABTBTC": "ABT-BTC", + "ABTETH": "ABT-ETH", + "ABTUSDT": "ABT-USDT", + "ACTBTC": "ACT-BTC", + "ACTUSDT": "ACT-USDT", + "ADABTC": "ADA-BTC", + "ADAETH": "ADA-ETH", + "ADAUSDT": "ADA-USDT", + "AEBTC": "AE-BTC", + "AEETH": "AE-ETH", + "AERGOBTC": "AERGO-BTC", + "AERGOUSDT": "AERGO-USDT", + "AEUSDT": "AE-USDT", + "AKITAUSDT": "AKITA-USDT", + "ALGOBTC": "ALGO-BTC", + "ALGOUSDK": "ALGO-USDK", + "ALGOUSDT": "ALGO-USDT", + "ALPHABTC": "ALPHA-BTC", + "ALPHAUSDT": "ALPHA-USDT", + "ALVUSDT": "ALV-USDT", + "ANCUSDT": "ANC-USDT", + "ANTBTC": "ANT-BTC", + "ANTUSDT": "ANT-USDT", + "ANWUSDT": "ANW-USDT", + "API3ETH": "API3-ETH", + "API3USDT": "API3-USDT", + "APIXUSDT": "APIX-USDT", + "APMUSDT": "APM-USDT", + "ARDRBTC": "ARDR-BTC", + "ARKBTC": "ARK-BTC", + "ARKUSDT": "ARK-USDT", + "ASTUSDT": "AST-USDT", + "ATOMBTC": "ATOM-BTC", + "ATOMETH": "ATOM-ETH", + "ATOMUSDT": "ATOM-USDT", + "AUCTIONUSDT": "AUCTION-USDT", + "AVAXBTC": "AVAX-BTC", + "AVAXETH": "AVAX-ETH", + "AVAXUSDT": "AVAX-USDT", + "BADGERBTC": "BADGER-BTC", + "BADGERUSDT": "BADGER-USDT", + "BALBTC": "BAL-BTC", + "BALUSDT": "BAL-USDT", + "BANDUSDT": "BAND-USDT", + "BATBTC": "BAT-BTC", + "BATUSDT": "BAT-USDT", + "BCDBTC": "BCD-BTC", + "BCDUSDT": "BCD-USDT", + "BCHABTC": "BCHA-BTC", + "BCHAUSDT": "BCHA-USDT", + "BCHBTC": "BCH-BTC", + "BCHUSDC": "BCH-USDC", + "BCHUSDK": "BCH-USDK", + "BCHUSDT": "BCH-USDT", + "BCXBTC": "BCX-BTC", + "BETHETH": "BETH-ETH", + "BETHUSDT": "BETH-USDT", + "BHPBTC": "BHP-BTC", + "BHPUSDT": "BHP-USDT", + "BLOCUSDT": "BLOC-USDT", + "BNTBTC": "BNT-BTC", + "BNTUSDT": "BNT-USDT", + "BOXUSDT": "BOX-USDT", + "BSVBTC": "BSV-BTC", + "BSVUSDC": "BSV-USDC", + "BSVUSDK": "BSV-USDK", + "BSVUSDT": "BSV-USDT", + "BTCDAI": "BTC-DAI", + "BTCUSDC": "BTC-USDC", + "BTCUSDK": "BTC-USDK", + "BTCUSDT": "BTC-USDT", + "BTGBTC": "BTG-BTC", + "BTGUSDT": "BTG-USDT", + "BTMBTC": "BTM-BTC", + "BTMETH": "BTM-ETH", + "BTMUSDT": "BTM-USDT", + "BTTBTC": "BTT-BTC", + "BTTUSDT": "BTT-USDT", + "CELOBTC": "CELO-BTC", + "CELOUSDT": "CELO-USDT", + "CELRUSDT": "CELR-USDT", + "CELUSDT": "CEL-USDT", + "CFXBTC": "CFX-BTC", + "CFXUSDT": "CFX-USDT", + "CHATUSDT": "CHAT-USDT", + "CHZBTC": "CHZ-BTC", + "CHZUSDT": "CHZ-USDT", + "CMTBTC": "CMT-BTC", + "CMTETH": "CMT-ETH", + "CMTUSDT": "CMT-USDT", + "CNTMUSDT": "CNTM-USDT", + "COMPBTC": "COMP-BTC", + "COMPUSDT": "COMP-USDT", + "CONVUSDT": "CONV-USDT", + "COVERUSDT": "COVER-USDT", + "CROBTC": "CRO-BTC", + "CROUSDK": "CRO-USDK", + "CROUSDT": "CRO-USDT", + "CRVBTC": "CRV-BTC", + "CRVETH": "CRV-ETH", + "CRVUSDT": "CRV-USDT", + "CSPRUSDT": "CSPR-USDT", + "CTCBTC": "CTC-BTC", + "CTCUSDT": "CTC-USDT", + "CTXCBTC": "CTXC-BTC", + "CTXCETH": "CTXC-ETH", + "CTXCUSDT": "CTXC-USDT", + "CVCBTC": "CVC-BTC", + "CVCUSDT": "CVC-USDT", + "CVPUSDT": "CVP-USDT", + "CVTBTC": "CVT-BTC", + "CVTUSDT": "CVT-USDT", + "DAIUSDT": "DAI-USDT", + "DAOUSDT": "DAO-USDT", + "DASHBTC": "DASH-BTC", + "DASHETH": "DASH-ETH", + "DASHUSDT": "DASH-USDT", + "DCRBTC": "DCR-BTC", + "DCRUSDT": "DCR-USDT", + "DEPUSDK": "DEP-USDK", + "DEPUSDT": "DEP-USDT", + "DGBBTC": "DGB-BTC", + "DGBUSDT": "DGB-USDT", + "DHTETH": "DHT-ETH", + "DHTUSDT": "DHT-USDT", + "DIAETH": "DIA-ETH", + "DIAUSDT": "DIA-USDT", + "DMDUSDT": "DMD-USDT", + "DMGUSDT": "DMG-USDT", + "DNABTC": "DNA-BTC", + "DNAUSDT": "DNA-USDT", + "DOGEBTC": "DOGE-BTC", + "DOGEETH": "DOGE-ETH", + "DOGEUSDK": 
"DOGE-USDK", + "DOGEUSDT": "DOGE-USDT", + "DORAUSDT": "DORA-USDT", + "DOTBTC": "DOT-BTC", + "DOTETH": "DOT-ETH", + "DOTUSDT": "DOT-USDT", + "ECUSDK": "EC-USDK", + "ECUSDT": "EC-USDT", + "EGLDBTC": "EGLD-BTC", + "EGLDUSDT": "EGLD-USDT", + "EGTBTC": "EGT-BTC", + "EGTETH": "EGT-ETH", + "EGTUSDT": "EGT-USDT", + "ELFBTC": "ELF-BTC", + "ELFUSDT": "ELF-USDT", + "EMUSDK": "EM-USDK", + "EMUSDT": "EM-USDT", + "ENJBTC": "ENJ-BTC", + "ENJUSDT": "ENJ-USDT", + "EOSBTC": "EOS-BTC", + "EOSETH": "EOS-ETH", + "EOSUSDC": "EOS-USDC", + "EOSUSDK": "EOS-USDK", + "EOSUSDT": "EOS-USDT", + "ETCBTC": "ETC-BTC", + "ETCETH": "ETC-ETH", + "ETCOKB": "ETC-OKB", + "ETCUSDC": "ETC-USDC", + "ETCUSDK": "ETC-USDK", + "ETCUSDT": "ETC-USDT", + "ETHBTC": "ETH-BTC", + "ETHDAI": "ETH-DAI", + "ETHUSDC": "ETH-USDC", + "ETHUSDK": "ETH-USDK", + "ETHUSDT": "ETH-USDT", + "ETMUSDT": "ETM-USDT", + "EXEUSDT": "EXE-USDT", + "FAIRUSDT": "FAIR-USDT", + "FILBTC": "FIL-BTC", + "FILETH": "FIL-ETH", + "FILUSDT": "FIL-USDT", + "FLMUSDT": "FLM-USDT", + "FLOWBTC": "FLOW-BTC", + "FLOWETH": "FLOW-ETH", + "FLOWUSDT": "FLOW-USDT", + "FORTHBTC": "FORTH-BTC", + "FORTHUSDT": "FORTH-USDT", + "FRONTETH": "FRONT-ETH", + "FRONTUSDT": "FRONT-USDT", + "FSNUSDK": "FSN-USDK", + "FSNUSDT": "FSN-USDT", + "FTMUSDK": "FTM-USDK", + "FTMUSDT": "FTM-USDT", + "FUNBTC": "FUN-BTC", + "GALUSDT": "GAL-USDT", + "GASBTC": "GAS-BTC", + "GASETH": "GAS-ETH", + "GASUSDT": "GAS-USDT", + "GHSTETH": "GHST-ETH", + "GHSTUSDT": "GHST-USDT", + "GLMBTC": "GLM-BTC", + "GLMUSDT": "GLM-USDT", + "GNXBTC": "GNX-BTC", + "GRTBTC": "GRT-BTC", + "GRTUSDT": "GRT-USDT", + "GTOBTC": "GTO-BTC", + "GTOUSDT": "GTO-USDT", + "GUSDBTC": "GUSD-BTC", + "GUSDUSDT": "GUSD-USDT", + "HBARBTC": "HBAR-BTC", + "HBARUSDK": "HBAR-USDK", + "HBARUSDT": "HBAR-USDT", + "HCBTC": "HC-BTC", + "HCUSDT": "HC-USDT", + "HDAOUSDK": "HDAO-USDK", + "HDAOUSDT": "HDAO-USDT", + "HEGICETH": "HEGIC-ETH", + "HEGICUSDT": "HEGIC-USDT", + "ICPBTC": "ICP-BTC", + "ICPUSDT": "ICP-USDT", + "ICXBTC": "ICX-BTC", + "ICXUSDT": "ICX-USDT", + "INTBTC": "INT-BTC", + "INTETH": "INT-ETH", + "INTUSDT": "INT-USDT", + "INXUSDT": "INX-USDT", + "IOSTBTC": "IOST-BTC", + "IOSTETH": "IOST-ETH", + "IOSTUSDT": "IOST-USDT", + "IOTABTC": "IOTA-BTC", + "IOTAUSDT": "IOTA-USDT", + "IQUSDT": "IQ-USDT", + "ITCUSDT": "ITC-USDT", + "JFIUSDT": "JFI-USDT", + "JSTUSDT": "JST-USDT", + "KANETH": "KAN-ETH", + "KANUSDT": "KAN-USDT", + "KCASHBTC": "KCASH-BTC", + "KCASHETH": "KCASH-ETH", + "KCASHUSDT": "KCASH-USDT", + "KINEUSDT": "KINE-USDT", + "KISHUUSDT": "KISHU-USDT", + "KLAYBTC": "KLAY-BTC", + "KLAYUSDT": "KLAY-USDT", + "KNCBTC": "KNC-BTC", + "KNCUSDT": "KNC-USDT", + "KONOUSDT": "KONO-USDT", + "KP3RUSDT": "KP3R-USDT", + "KSMBTC": "KSM-BTC", + "KSMETH": "KSM-ETH", + "KSMUSDT": "KSM-USDT", + "LAMBUSDK": "LAMB-USDK", + "LAMBUSDT": "LAMB-USDT", + "LATUSDT": "LAT-USDT", + "LBAUSDT": "LBA-USDT", + "LEOUSDK": "LEO-USDK", + "LEOUSDT": "LEO-USDT", + "LETUSDT": "LET-USDT", + "LINKBTC": "LINK-BTC", + "LINKETH": "LINK-ETH", + "LINKUSDT": "LINK-USDT", + "LMCHUSDT": "LMCH-USDT", + "LONETH": "LON-ETH", + "LONUSDT": "LON-USDT", + "LOONBTC": "LOON-BTC", + "LOONUSDT": "LOON-USDT", + "LPTUSDT": "LPT-USDT", + "LRCBTC": "LRC-BTC", + "LRCUSDT": "LRC-USDT", + "LSKBTC": "LSK-BTC", + "LSKUSDT": "LSK-USDT", + "LTCBTC": "LTC-BTC", + "LTCETH": "LTC-ETH", + "LTCOKB": "LTC-OKB", + "LTCUSDC": "LTC-USDC", + "LTCUSDK": "LTC-USDK", + "LTCUSDT": "LTC-USDT", + "LUNABTC": "LUNA-BTC", + "LUNAUSDT": "LUNA-USDT", + "MANABTC": "MANA-BTC", + "MANAETH": "MANA-ETH", + "MANAUSDT": "MANA-USDT", + "MASKUSDT": "MASK-USDT", 
+ "MATICUSDT": "MATIC-USDT", + "MCOBTC": "MCO-BTC", + "MCOETH": "MCO-ETH", + "MCOUSDT": "MCO-USDT", + "MDAUSDT": "MDA-USDT", + "MDTUSDT": "MDT-USDT", + "MEMEUSDT": "MEME-USDT", + "MIRUSDT": "MIR-USDT", + "MITHBTC": "MITH-BTC", + "MITHETH": "MITH-ETH", + "MITHUSDT": "MITH-USDT", + "MKRBTC": "MKR-BTC", + "MKRETH": "MKR-ETH", + "MKRUSDT": "MKR-USDT", + "MLNUSDT": "MLN-USDT", + "MOFBTC": "MOF-BTC", + "MOFUSDT": "MOF-USDT", + "MXCUSDT": "MXC-USDT", + "MXTUSDT": "MXT-USDT", + "NANOBTC": "NANO-BTC", + "NANOUSDT": "NANO-USDT", + "NASBTC": "NAS-BTC", + "NASUSDT": "NAS-USDT", + "NDNUSDK": "NDN-USDK", + "NDNUSDT": "NDN-USDT", + "NEARBTC": "NEAR-BTC", + "NEARETH": "NEAR-ETH", + "NEARUSDT": "NEAR-USDT", + "NEOBTC": "NEO-BTC", + "NEOETH": "NEO-ETH", + "NEOUSDT": "NEO-USDT", + "NMRUSDT": "NMR-USDT", + "NUBTC": "NU-BTC", + "NULSBTC": "NULS-BTC", + "NULSETH": "NULS-ETH", + "NULSUSDT": "NULS-USDT", + "NUUSDT": "NU-USDT", + "OKBBTC": "OKB-BTC", + "OKBETH": "OKB-ETH", + "OKBUSDC": "OKB-USDC", + "OKBUSDK": "OKB-USDK", + "OKBUSDT": "OKB-USDT", + "OKTBTC": "OKT-BTC", + "OKTETH": "OKT-ETH", + "OKTUSDT": "OKT-USDT", + "OMETH": "OM-ETH", + "OMGBTC": "OMG-BTC", + "OMGUSDT": "OMG-USDT", + "OMUSDT": "OM-USDT", + "ONTBTC": "ONT-BTC", + "ONTETH": "ONT-ETH", + "ONTUSDT": "ONT-USDT", + "ORBSUSDK": "ORBS-USDK", + "ORBSUSDT": "ORBS-USDT", + "ORSUSDT": "ORS-USDT", + "OXTUSDT": "OXT-USDT", + "PAXBTC": "PAX-BTC", + "PAXUSDT": "PAX-USDT", + "PAYBTC": "PAY-BTC", + "PAYUSDT": "PAY-USDT", + "PERPUSDT": "PERP-USDT", + "PHAETH": "PHA-ETH", + "PHAUSDT": "PHA-USDT", + "PICKLEUSDT": "PICKLE-USDT", + "PLGUSDK": "PLG-USDK", + "PLGUSDT": "PLG-USDT", + "PMABTC": "PMA-BTC", + "PMAUSDK": "PMA-USDK", + "PNKUSDT": "PNK-USDT", + "POLSETH": "POLS-ETH", + "POLSUSDT": "POLS-USDT", + "PPTUSDT": "PPT-USDT", + "PROPSETH": "PROPS-ETH", + "PROPSUSDT": "PROPS-USDT", + "PRQUSDT": "PRQ-USDT", + "PSTBTC": "PST-BTC", + "PSTUSDT": "PST-USDT", + "QTUMBTC": "QTUM-BTC", + "QTUMETH": "QTUM-ETH", + "QTUMUSDT": "QTUM-USDT", + "QUNBTC": "QUN-BTC", + "QUNUSDT": "QUN-USDT", + "RENBTC": "REN-BTC", + "RENUSDT": "REN-USDT", + "REPETH": "REP-ETH", + "REPUSDT": "REP-USDT", + "RFUELETH": "RFUEL-ETH", + "RFUELUSDT": "RFUEL-USDT", + "RIOUSDT": "RIO-USDT", + "RNTUSDT": "RNT-USDT", + "ROADUSDK": "ROAD-USDK", + "ROADUSDT": "ROAD-USDT", + "RSRBTC": "RSR-BTC", + "RSRETH": "RSR-ETH", + "RSRUSDT": "RSR-USDT", + "RVNBTC": "RVN-BTC", + "RVNUSDT": "RVN-USDT", + "SANDUSDT": "SAND-USDT", + "SBTCBTC": "SBTC-BTC", + "SCBTC": "SC-BTC", + "SCUSDT": "SC-USDT", + "SFGUSDT": "SFG-USDT", + "SHIBUSDT": "SHIB-USDT", + "SKLUSDT": "SKL-USDT", + "SNCBTC": "SNC-BTC", + "SNTBTC": "SNT-BTC", + "SNTUSDT": "SNT-USDT", + "SNXETH": "SNX-ETH", + "SNXUSDT": "SNX-USDT", + "SOCUSDT": "SOC-USDT", + "SOLBTC": "SOL-BTC", + "SOLETH": "SOL-ETH", + "SOLUSDT": "SOL-USDT", + "SRMBTC": "SRM-BTC", + "SRMUSDT": "SRM-USDT", + "STORJUSDT": "STORJ-USDT", + "STRKUSDT": "STRK-USDT", + "STXBTC": "STX-BTC", + "STXUSDT": "STX-USDT", + "SUNETH": "SUN-ETH", + "SUNUSDT": "SUN-USDT", + "SUSHIETH": "SUSHI-ETH", + "SUSHIUSDT": "SUSHI-USDT", + "SWFTCBTC": "SWFTC-BTC", + "SWFTCETH": "SWFTC-ETH", + "SWFTCUSDT": "SWFTC-USDT", + "SWRVUSDT": "SWRV-USDT", + "TAIUSDT": "TAI-USDT", + "TCTBTC": "TCT-BTC", + "TCTUSDT": "TCT-USDT", + "THETABTC": "THETA-BTC", + "THETAUSDT": "THETA-USDT", + "TMTGBTC": "TMTG-BTC", + "TMTGUSDT": "TMTG-USDT", + "TOPCUSDT": "TOPC-USDT", + "TORNETH": "TORN-ETH", + "TORNUSDT": "TORN-USDT", + "TRADEETH": "TRADE-ETH", + "TRADEUSDT": "TRADE-USDT", + "TRAUSDT": "TRA-USDT", + "TRBUSDT": "TRB-USDT", + "TRIOBTC": "TRIO-BTC", + 
"TRIOUSDT": "TRIO-USDT", + "TRUEBTC": "TRUE-BTC", + "TRUEUSDT": "TRUE-USDT", + "TRXBTC": "TRX-BTC", + "TRXETH": "TRX-ETH", + "TRXUSDC": "TRX-USDC", + "TRXUSDK": "TRX-USDK", + "TRXUSDT": "TRX-USDT", + "TUSDBTC": "TUSD-BTC", + "TUSDUSDT": "TUSD-USDT", + "UBTCUSDT": "UBTC-USDT", + "UMAUSDT": "UMA-USDT", + "UNIBTC": "UNI-BTC", + "UNIETH": "UNI-ETH", + "UNIUSDT": "UNI-USDT", + "USDCBTC": "USDC-BTC", + "USDCUSDT": "USDC-USDT", + "USDTUSDK": "USDT-USDK", + "UTKUSDT": "UTK-USDT", + "VALUEETH": "VALUE-ETH", + "VALUEUSDT": "VALUE-USDT", + "VELOUSDT": "VELO-USDT", + "VIBBTC": "VIB-BTC", + "VIBUSDT": "VIB-USDT", + "VITEBTC": "VITE-BTC", + "VRAUSDT": "VRA-USDT", + "VSYSBTC": "VSYS-BTC", + "VSYSUSDK": "VSYS-USDK", + "VSYSUSDT": "VSYS-USDT", + "WAVESBTC": "WAVES-BTC", + "WAVESUSDT": "WAVES-USDT", + "WBTCBTC": "WBTC-BTC", + "WBTCETH": "WBTC-ETH", + "WBTCUSDT": "WBTC-USDT", + "WGRTUSDK": "WGRT-USDK", + "WGRTUSDT": "WGRT-USDT", + "WINGUSDT": "WING-USDT", + "WNXMUSDT": "WNXM-USDT", + "WTCBTC": "WTC-BTC", + "WTCUSDT": "WTC-USDT", + "WXTBTC": "WXT-BTC", + "WXTUSDK": "WXT-USDK", + "WXTUSDT": "WXT-USDT", + "XCHBTC": "XCH-BTC", + "XCHUSDT": "XCH-USDT", + "XEMBTC": "XEM-BTC", + "XEMETH": "XEM-ETH", + "XEMUSDT": "XEM-USDT", + "XLMBTC": "XLM-BTC", + "XLMETH": "XLM-ETH", + "XLMUSDT": "XLM-USDT", + "XMRBTC": "XMR-BTC", + "XMRETH": "XMR-ETH", + "XMRUSDT": "XMR-USDT", + "XPOUSDT": "XPO-USDT", + "XPRUSDT": "XPR-USDT", + "XRPBTC": "XRP-BTC", + "XRPETH": "XRP-ETH", + "XRPOKB": "XRP-OKB", + "XRPUSDC": "XRP-USDC", + "XRPUSDK": "XRP-USDK", + "XRPUSDT": "XRP-USDT", + "XSRUSDT": "XSR-USDT", + "XTZBTC": "XTZ-BTC", + "XTZUSDT": "XTZ-USDT", + "XUCUSDT": "XUC-USDT", + "YEEUSDT": "YEE-USDT", + "YFIBTC": "YFI-BTC", + "YFIETH": "YFI-ETH", + "YFIIUSDT": "YFII-USDT", + "YFIUSDT": "YFI-USDT", + "YOUBTC": "YOU-BTC", + "YOUUSDT": "YOU-USDT", + "YOYOUSDT": "YOYO-USDT", + "ZECBTC": "ZEC-BTC", + "ZECETH": "ZEC-ETH", + "ZECUSDT": "ZEC-USDT", + "ZENBTC": "ZEN-BTC", + "ZENUSDT": "ZEN-USDT", + "ZILBTC": "ZIL-BTC", + "ZILETH": "ZIL-ETH", + "ZILUSDT": "ZIL-USDT", + "ZKSUSDT": "ZKS-USDT", + "ZRXBTC": "ZRX-BTC", + "ZRXETH": "ZRX-ETH", + "ZRXUSDT": "ZRX-USDT", + "ZYROUSDT": "ZYRO-USDT", +} diff --git a/pkg/fixedpoint/convert.go b/pkg/fixedpoint/convert.go index 1485fda00b..7fe35c7ee9 100644 --- a/pkg/fixedpoint/convert.go +++ b/pkg/fixedpoint/convert.go @@ -1,39 +1,185 @@ +//go:build !dnum + package fixedpoint import ( - "encoding/json" + "bytes" + "database/sql/driver" + "errors" "fmt" "math" "strconv" + "strings" "sync/atomic" ) +const MaxPrecision = 12 const DefaultPrecision = 8 const DefaultPow = 1e8 type Value int64 +const Zero = Value(0) +const One = Value(1e8) +const NegOne = Value(-1e8) + +type RoundingMode int + +const ( + Up RoundingMode = iota + Down + HalfUp +) + +// Trunc returns the integer portion (truncating any fractional part) +func (v Value) Trunc() Value { + return NewFromFloat(math.Floor(v.Float64())) +} + +func (v Value) Round(r int, mode RoundingMode) Value { + pow := math.Pow10(r) + result := v.Float64() * pow + switch mode { + case Up: + return NewFromFloat(math.Ceil(result) / pow) + case HalfUp: + return NewFromFloat(math.Floor(result+0.5) / pow) + case Down: + return NewFromFloat(math.Floor(result) / pow) + } + return v +} + +func (v Value) Value() (driver.Value, error) { + return v.Float64(), nil +} + +func (v *Value) Scan(src interface{}) error { + switch d := src.(type) { + case int64: + *v = NewFromInt(d) + return nil + + case float64: + *v = NewFromFloat(d) + return nil + + case []byte: + vv, err := 
NewFromString(string(d)) + if err != nil { + return err + } + *v = vv + return nil + + default: + + } + + return fmt.Errorf("fixedpoint.Value scan error, type: %T is not supported, value; %+v", src, src) +} + func (v Value) Float64() float64 { return float64(v) / DefaultPow } +func (v Value) Abs() Value { + if v < 0 { + return -v + } + return v +} + +func (v Value) String() string { + return strconv.FormatFloat(float64(v)/DefaultPow, 'f', -1, 64) +} + +func (v Value) FormatString(prec int) string { + pow := math.Pow10(prec) + return strconv.FormatFloat( + math.Trunc(float64(v)/DefaultPow*pow)/pow, 'f', prec, 64) +} + +func (v Value) Percentage() string { + if v == 0 { + return "0" + } + return strconv.FormatFloat(float64(v)/DefaultPow*100., 'f', -1, 64) + "%" +} + +func (v Value) FormatPercentage(prec int) string { + if v == 0 { + return "0" + } + pow := math.Pow10(prec) + result := strconv.FormatFloat( + math.Trunc(float64(v)/DefaultPow*pow*100.)/pow, 'f', prec, 64) + return result + "%" +} + +func (v Value) SignedPercentage() string { + if v > 0 { + return "+" + v.Percentage() + } + return v.Percentage() +} + func (v Value) Int64() int64 { - return int64(v) + return int64(v.Float64()) +} + +func (v Value) Int() int { + n := v.Int64() + if int64(int(n)) != n { + panic("unable to convert Value to int32") + } + return int(n) +} + +func (v Value) Neg() Value { + return -v +} + +// TODO inf +func (v Value) Sign() int { + if v > 0 { + return 1 + } else if v == 0 { + return 0 + } else { + return -1 + } +} + +func (v Value) IsZero() bool { + return v == 0 +} + +func Mul(x, y Value) Value { + return NewFromFloat(x.Float64() * y.Float64()) } func (v Value) Mul(v2 Value) Value { return NewFromFloat(v.Float64() * v2.Float64()) } -func (v Value) MulFloat64(v2 float64) Value { - return NewFromFloat(v.Float64() * v2) +func Div(x, y Value) Value { + return NewFromFloat(x.Float64() / y.Float64()) } func (v Value) Div(v2 Value) Value { return NewFromFloat(v.Float64() / v2.Float64()) } +func (v Value) Floor() Value { + return NewFromFloat(math.Floor(v.Float64())) +} + +func (v Value) Ceil() Value { + return NewFromFloat(math.Ceil(v.Float64())) +} + func (v Value) Sub(v2 Value) Value { return Value(int64(v) - int64(v2)) } @@ -60,7 +206,7 @@ func (v *Value) UnmarshalYAML(unmarshal func(a interface{}) error) (err error) { var i int64 if err = unmarshal(&i); err == nil { - *v = NewFromInt64(i) + *v = NewFromInt(i) return } @@ -76,68 +222,290 @@ func (v *Value) UnmarshalYAML(unmarshal func(a interface{}) error) (err error) { return err } +func (v Value) MarshalYAML() (interface{}, error) { + return v.FormatString(DefaultPrecision), nil +} + func (v Value) MarshalJSON() ([]byte, error) { - f := float64(v) / DefaultPow - o := fmt.Sprintf("%f", f) - return []byte(o), nil + return []byte(v.FormatString(DefaultPrecision)), nil } func (v *Value) UnmarshalJSON(data []byte) error { - var a interface{} - var err = json.Unmarshal(data, &a) - if err != nil { + if bytes.Equal(data, []byte{'n', 'u', 'l', 'l'}) { + *v = Zero + return nil + } + if len(data) == 0 { + *v = Zero + return nil + } + var err error + if data[0] == '"' { + data = data[1 : len(data)-1] + } + if *v, err = NewFromString(string(data)); err != nil { return err } + return nil +} - switch d := a.(type) { - case float64: - *v = NewFromFloat(d) +var ErrPrecisionLoss = errors.New("precision loss") - case float32: - *v = NewFromFloat32(d) +func Parse(input string) (num int64, numDecimalPoints int, err error) { + length := len(input) + isPercentage := 
input[length-1] == '%' + if isPercentage { + length -= 1 + input = input[0:length] + } - case int: - *v = NewFromInt(d) - case int64: - *v = NewFromInt64(d) + var neg int64 = 1 + var digit int64 + for i := 0; i < length; i++ { + c := input[i] + if c == '-' { + neg = -1 + } else if c >= '0' && c <= '9' { + digit, err = strconv.ParseInt(string(c), 10, 64) + if err != nil { + return + } + + num = num*10 + digit + } else if c == '.' { + i++ + if i > len(input)-1 { + err = fmt.Errorf("expect fraction numbers after dot") + return + } + + for j := i; j < len(input); j++ { + fc := input[j] + if fc >= '0' && fc <= '9' { + digit, err = strconv.ParseInt(string(fc), 10, 64) + if err != nil { + return + } + + numDecimalPoints++ + num = num*10 + digit + + if numDecimalPoints >= MaxPrecision { + return num, numDecimalPoints, ErrPrecisionLoss + } + } else { + err = fmt.Errorf("expect digit, got %c", fc) + return + } + } + break + } else { + err = fmt.Errorf("unexpected char %c", c) + return + } + } - default: - return fmt.Errorf("unsupported type: %T %v", d, d) + num = num * neg + if isPercentage { + numDecimalPoints += 2 + } + + return num, numDecimalPoints, nil +} +func NewFromString(input string) (Value, error) { + length := len(input) + + if length == 0 { + return 0, nil } - return nil + isPercentage := input[length-1] == '%' + if isPercentage { + input = input[0 : length-1] + } + dotIndex := -1 + hasDecimal := false + decimalCount := 0 + // if is decimal, we don't need this + hasScientificNotion := false + scIndex := -1 + for i, c := range input { + if hasDecimal { + if c <= '9' && c >= '0' { + decimalCount++ + } else { + break + } + + } else if c == '.' { + dotIndex = i + hasDecimal = true + } + if c == 'e' || c == 'E' { + hasScientificNotion = true + scIndex = i + break + } + } + if hasDecimal { + after := input[dotIndex+1:] + if decimalCount >= 8 { + after = after[0:8] + "." 
+ after[8:] + } else { + after = after[0:decimalCount] + strings.Repeat("0", 8-decimalCount) + after[decimalCount:] + } + input = input[0:dotIndex] + after + v, err := strconv.ParseFloat(input, 64) + if err != nil { + return 0, err + } + + if isPercentage { + v = v * 0.01 + } + + return Value(int64(math.Trunc(v))), nil + + } else if hasScientificNotion { + exp, err := strconv.ParseInt(input[scIndex+1:], 10, 32) + if err != nil { + return 0, err + } + v, err := strconv.ParseFloat(input[0:scIndex+1]+strconv.FormatInt(exp+8, 10), 64) + if err != nil { + return 0, err + } + return Value(int64(math.Trunc(v))), nil + } else { + v, err := strconv.ParseInt(input, 10, 64) + if err != nil { + return 0, err + } + if isPercentage { + v = v * DefaultPow / 100 + } else { + v = v * DefaultPow + } + return Value(v), nil + } } -func Must(v Value, err error) Value { +func MustNewFromString(input string) Value { + v, err := NewFromString(input) if err != nil { - panic(err) + panic(fmt.Errorf("can not parse %s into fixedpoint, error: %s", input, err.Error())) } + return v +} +func NewFromBytes(input []byte) (Value, error) { + return NewFromString(string(input)) +} + +func MustNewFromBytes(input []byte) (v Value) { + var err error + if v, err = NewFromString(string(input)); err != nil { + return Zero + } return v } -func NewFromString(input string) (Value, error) { - v, err := strconv.ParseFloat(input, 64) +func Must(v Value, err error) Value { if err != nil { - return 0, err + panic(err) } - - return NewFromFloat(v), nil + return v } func NewFromFloat(val float64) Value { - return Value(int64(math.Round(val * DefaultPow))) + return Value(int64(math.Trunc(val * DefaultPow))) } -func NewFromFloat32(val float32) Value { - return Value(int64(math.Round(float64(val) * DefaultPow))) +func NewFromInt(val int64) Value { + return Value(val * DefaultPow) } -func NewFromInt(val int) Value { - return Value(int64(val * DefaultPow)) +func (a Value) MulExp(exp int) Value { + return Value(int64(float64(a) * math.Pow(10, float64(exp)))) } -func NewFromInt64(val int64) Value { - return Value(val * DefaultPow) +func (a Value) MulPow(v Value, exp Value) Value { + return Value(int64(float64(a) * math.Pow(v.Float64(), exp.Float64()))) +} + +func (a Value) NumIntDigits() int { + digits := 0 + target := int64(a) + for pow := int64(DefaultPow); pow <= target; pow *= 10 { + digits++ + } + return digits +} + +// TODO: speedup +func (a Value) NumFractionalDigits() int { + if a == 0 { + return 0 + } + numPow := 0 + for pow := int64(DefaultPow); pow%10 != 1; pow /= 10 { + numPow++ + } + numZeros := 0 + for v := int64(a); v%10 == 0; v /= 10 { + numZeros++ + } + return numPow - numZeros +} + +func Compare(x, y Value) int { + if x > y { + return 1 + } else if x == y { + return 0 + } else { + return -1 + } +} + +func (x Value) Compare(y Value) int { + if x > y { + return 1 + } else if x == y { + return 0 + } else { + return -1 + } +} + +func Min(a, b Value) Value { + if a < b { + return a + } + + return b +} + +func Max(a, b Value) Value { + if a > b { + return a + } + + return b +} + +func Equal(x, y Value) bool { + return x == y +} + +func (x Value) Eq(y Value) bool { + return x == y +} + +func Abs(a Value) Value { + if a < 0 { + return -a + } + return a } diff --git a/pkg/fixedpoint/dec.go b/pkg/fixedpoint/dec.go new file mode 100644 index 0000000000..f25c20d781 --- /dev/null +++ b/pkg/fixedpoint/dec.go @@ -0,0 +1,1303 @@ +//go:build dnum + +package fixedpoint + +import ( + "bytes" + "database/sql/driver" + "errors" + "fmt" + "math" + 
"math/bits" + "strconv" + "strings" +) + +type Value struct { + coef uint64 + sign int8 + exp int +} + +const ( + signPosInf = +2 + signPos = +1 + signZero = 0 + signNeg = -1 + signNegInf = -2 + coefMin = 1000_0000_0000_0000 + coefMax = 9999_9999_9999_9999 + digitsMax = 16 + shiftMax = digitsMax - 1 + // to switch between scientific notion and normal presentation format + maxLeadingZeros = 19 +) + +// common values +var ( + Zero = Value{} + One = Value{1000_0000_0000_0000, signPos, 1} + NegOne = Value{1000_0000_0000_0000, signNeg, 1} + PosInf = Value{1, signPosInf, 0} + NegInf = Value{1, signNegInf, 0} +) + +var pow10f = [...]float64{ + 1, + 10, + 100, + 1000, + 10000, + 100000, + 1000000, + 10000000, + 100000000, + 1000000000, + 10000000000, + 100000000000, + 1000000000000, + 10000000000000, + 100000000000000, + 1000000000000000, + 10000000000000000, + 100000000000000000, + 1000000000000000000, + 10000000000000000000, + 100000000000000000000} + +var pow10 = [...]uint64{ + 1, + 10, + 100, + 1000, + 10000, + 100000, + 1000000, + 10000000, + 100000000, + 1000000000, + 10000000000, + 100000000000, + 1000000000000, + 10000000000000, + 100000000000000, + 1000000000000000, + 10000000000000000, + 100000000000000000, + 1000000000000000000} + +var halfpow10 = [...]uint64{ + 0, + 5, + 50, + 500, + 5000, + 50000, + 500000, + 5000000, + 50000000, + 500000000, + 5000000000, + 50000000000, + 500000000000, + 5000000000000, + 50000000000000, + 500000000000000, + 5000000000000000, + 50000000000000000, + 500000000000000000, + 5000000000000000000} + +func min(a int, b int) int { + if a < b { + return a + } + return b +} + +func max(a int, b int) int { + if a > b { + return a + } + return b +} + +func (v Value) Value() (driver.Value, error) { + return v.Float64(), nil +} + +// NewFromInt returns a Value for an int +func NewFromInt(n int64) Value { + if n == 0 { + return Zero + } + //n0 := n + sign := int8(signPos) + if n < 0 { + n = -n + sign = signNeg + } + return newNoSignCheck(sign, uint64(n), digitsMax) +} + +const log2of10 = 3.32192809488736234 + +// NewFromFloat converts a float64 to a Value +func NewFromFloat(f float64) Value { + switch { + case math.IsInf(f, +1): + return PosInf + case math.IsInf(f, -1): + return NegInf + case math.IsNaN(f): + panic("value.NewFromFloat can't convert NaN") + } + + if f == 0 { + return Zero + } + + sign := int8(signPos) + if f < 0 { + f = -f + sign = signNeg + } + n := uint64(f) + if float64(n) == f { + return newNoSignCheck(sign, n, digitsMax) + } + _, e := math.Frexp(f) + e = int(float32(e) / log2of10) + c := uint64(f/math.Pow10(e-16) + 0.5) + return newNoSignCheck(sign, c, e) +} + +// Raw constructs a Value without normalizing - arguments must be valid. 
+// Used by SuValue Unpack +func Raw(sign int8, coef uint64, exp int) Value { + return Value{coef, sign, int(exp)} +} + +func newNoSignCheck(sign int8, coef uint64, exp int) Value { + atmax := false + for coef > coefMax { + coef = (coef + 5) / 10 + exp++ + atmax = true + } + + if !atmax { + p := maxShift(coef) + coef *= pow10[p] + exp -= p + } + return Value{coef, sign, exp} +} + +// New constructs a Value, maximizing coef and handling exp out of range +// Used to normalize results of operations +func New(sign int8, coef uint64, exp int) Value { + if sign == 0 || coef == 0 { + return Zero + } else if sign == signPosInf { + return PosInf + } else if sign == signNegInf { + return NegInf + } else { + atmax := false + for coef > coefMax { + coef = (coef + 5) / 10 + exp++ + atmax = true + } + + if !atmax { + p := maxShift(coef) + coef *= pow10[p] + exp -= p + } + return Value{coef, sign, exp} + } +} + +func maxShift(x uint64) int { + i := ilog10(x) + if i > shiftMax { + return 0 + } + return shiftMax - i +} + +func ilog10(x uint64) int { + // based on Hacker's Delight + if x == 0 { + return 0 + } + y := (19 * (63 - bits.LeadingZeros64(x))) >> 6 + if y < 18 && x >= pow10[y+1] { + y++ + } + return y +} + +func Inf(sign int8) Value { + switch { + case sign < 0: + return NegInf + case sign > 0: + return PosInf + default: + return Zero + } +} + +func (dn Value) FormatString(prec int) string { + if dn.sign == 0 { + if prec <= 0 { + return "0" + } else { + return "0." + strings.Repeat("0", prec) + } + } + sign := "" + if dn.sign < 0 { + sign = "-" + } + if dn.IsInf() { + return sign + "inf" + } + digits := getDigits(dn.coef) + nd := len(digits) + e := int(dn.exp) - nd + if -maxLeadingZeros <= dn.exp && dn.exp <= 0 { + // decimal to the left + if prec+e+nd > 0 { + return sign + "0." + strings.Repeat("0", -e-nd) + digits[:min(prec+e+nd, nd)] + strings.Repeat("0", max(0, prec-nd+e+nd)) + } else if -e-nd > 0 { + return "0." + strings.Repeat("0", -e-nd) + } else { + return "0" + } + } else if -nd < e && e <= -1 { + // decimal within + dec := nd + e + decimals := digits[dec:min(dec+prec, nd)] + return sign + digits[:dec] + "." + decimals + strings.Repeat("0", max(0, prec-len(decimals))) + } else if 0 < dn.exp && dn.exp <= digitsMax { + // decimal to the right + if prec > 0 { + return sign + digits + strings.Repeat("0", e) + "." + strings.Repeat("0", prec) + } else { + return sign + digits + strings.Repeat("0", e) + } + } else { + // scientific notation + after := "" + if nd > 1 { + after = "." + digits[1:min(1+prec, nd)] + strings.Repeat("0", max(0, min(1+prec, nd)-1-prec)) + } + return sign + digits[:1] + after + "e" + strconv.Itoa(int(dn.exp-1)) + } +} + +// String returns a string representation of the Value +func (dn Value) String() string { + if dn.sign == 0 { + return "0" + } + sign := "" + if dn.sign < 0 { + sign = "-" + } + if dn.IsInf() { + return sign + "inf" + } + digits := getDigits(dn.coef) + nd := len(digits) + e := int(dn.exp) - nd + if -maxLeadingZeros <= dn.exp && dn.exp <= 0 { + // decimal to the left + return sign + "0." + strings.Repeat("0", -e-nd) + digits + } else if -nd < e && e <= -1 { + // decimal within + dec := nd + e + return sign + digits[:dec] + "." + digits[dec:] + } else if 0 < dn.exp && dn.exp <= digitsMax { + // decimal to the right + return sign + digits + strings.Repeat("0", e) + } else { + // scientific notation + after := "" + if nd > 1 { + after = "." 
+ digits[1:] + } + return sign + digits[:1] + after + "e" + strconv.Itoa(int(dn.exp-1)) + } +} + +func (dn Value) Percentage() string { + if dn.sign == 0 { + return "0%" + } + sign := "" + if dn.sign < 0 { + sign = "-" + } + if dn.IsInf() { + return sign + "inf%" + } + digits := getDigits(dn.coef) + nd := len(digits) + e := int(dn.exp) - nd + 2 + + if -maxLeadingZeros <= dn.exp && dn.exp <= -2 { + // decimal to the left + return sign + "0." + strings.Repeat("0", -e-nd) + digits + "%" + } else if -nd < e && e <= -1 { + // decimal within + dec := nd + e + return sign + digits[:dec] + "." + digits[dec:] + "%" + } else if -2 < dn.exp && dn.exp <= digitsMax { + // decimal to the right + return sign + digits + strings.Repeat("0", e) + "%" + } else { + // scientific notation + after := "" + if nd > 1 { + after = "." + digits[1:] + } + return sign + digits[:1] + after + "e" + strconv.Itoa(int(dn.exp-1)) + "%" + } +} + +func (dn Value) FormatPercentage(prec int) string { + if dn.sign == 0 { + if prec <= 0 { + return "0" + } else { + return "0." + strings.Repeat("0", prec) + } + } + sign := "" + if dn.sign < 0 { + sign = "-" + } + if dn.IsInf() { + return sign + "inf" + } + digits := getDigits(dn.coef) + nd := len(digits) + exp := dn.exp + 2 + e := int(exp) - nd + + if -maxLeadingZeros <= exp && exp <= 0 { + // decimal to the left + if prec+e+nd > 0 { + return sign + "0." + strings.Repeat("0", -e-nd) + digits[:min(prec+e+nd, nd)] + strings.Repeat("0", max(0, prec-nd+e+nd)) + "%" + } else if -e-nd > 0 { + return "0." + strings.Repeat("0", -e-nd) + "%" + } else { + return "0" + } + } else if -nd < e && e <= -1 { + // decimal within + dec := nd + e + decimals := digits[dec:min(dec+prec, nd)] + return sign + digits[:dec] + "." + decimals + strings.Repeat("0", max(0, prec-len(decimals))) + "%" + } else if 0 < exp && exp <= digitsMax { + // decimal to the right + if prec > 0 { + return sign + digits + strings.Repeat("0", e) + "." + strings.Repeat("0", prec) + "%" + } else { + return sign + digits + strings.Repeat("0", e) + "%" + } + } else { + // scientific notation + after := "" + if nd > 1 { + after = "." + digits[1:min(1+prec, nd)] + strings.Repeat("0", max(0, min(1+prec, nd)-1-prec)) + } + return sign + digits[:1] + after + "e" + strconv.Itoa(int(exp-1)) + "%" + } +} + +func (dn Value) SignedPercentage() string { + if dn.Sign() >= 0 { + return "+" + dn.Percentage() + } + return dn.Percentage() +} + +// get digit length +func (a Value) NumDigits() int { + i := shiftMax + coef := a.coef + nd := 0 + for coef != 0 && coef < pow10[i] { + i-- + } + for coef != 0 { + coef %= pow10[i] + i-- + nd++ + } + return nd +} + +// alias of Exp +func (a Value) NumIntDigits() int { + return a.exp +} + +// get fractional digits +func (a Value) NumFractionalDigits() int { + nd := a.NumDigits() + return nd - a.exp +} + +func getDigits(coef uint64) string { + var digits [digitsMax]byte + i := shiftMax + nd := 0 + for coef != 0 { + digits[nd] = byte('0' + (coef / pow10[i])) + coef %= pow10[i] + nd++ + i-- + } + return string(digits[:nd]) +} + +func (v *Value) Scan(src interface{}) error { + var err error + switch d := src.(type) { + case int64: + *v = NewFromInt(d) + return nil + case float64: + *v = NewFromFloat(d) + return nil + case []byte: + *v, err = NewFromString(string(d)) + if err != nil { + return err + } + return nil + default: + } + return fmt.Errorf("fixedpoint.Value scan error, type %T is not supported, value: %+v", src, src) +} + +// NewFromString parses a numeric string and returns a Value representation. 
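+// It accepts an optional sign, decimal digits with an optional fraction and
+// exponent (e.g. "1.5", "-2e-3"), the literal "inf", and a trailing '%', which
+// divides the parsed value by 100. Exponents outside the representable range
+// collapse to Zero or ±Inf, and an empty string parses as Zero.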
+func NewFromString(s string) (Value, error) { + length := len(s) + if length == 0 { + return Zero, nil + } + isPercentage := s[length-1] == '%' + if isPercentage { + s = s[:length-1] + } + r := &reader{s, 0} + sign := r.getSign() + if r.matchStr("inf") { + return Inf(sign), nil + } + coef, exp := r.getCoef() + exp += r.getExp() + if r.len() != 0 { // didn't consume entire string + return Zero, errors.New("invalid number") + } else if coef == 0 || exp < math.MinInt8 { + return Zero, nil + } else if exp > math.MaxInt8 { + return Inf(sign), nil + } + if isPercentage { + exp -= 2 + } + atmax := false + for coef > coefMax { + coef = (coef + 5) / 10 + exp++ + atmax = true + } + + if !atmax { + p := maxShift(coef) + coef *= pow10[p] + exp -= p + } + //check(coefMin <= coef && coef <= coefMax) + return Value{coef, sign, exp}, nil +} + +func MustNewFromString(input string) Value { + v, err := NewFromString(input) + if err != nil { + panic(fmt.Errorf("cannot parse %s into fixedpoint, error: %s", input, err.Error())) + } + return v +} + +func NewFromBytes(s []byte) (Value, error) { + length := len(s) + if length == 0 { + return Zero, nil + } + isPercentage := s[length-1] == '%' + if isPercentage { + s = s[:length-1] + } + r := &readerBytes{s, 0} + sign := r.getSign() + if r.matchStr("inf") { + return Inf(sign), nil + } + coef, exp := r.getCoef() + exp += r.getExp() + if r.len() != 0 { // didn't consume entire string + return Zero, errors.New("invalid number") + } else if coef == 0 || exp < math.MinInt8 { + return Zero, nil + } else if exp > math.MaxInt8 { + return Inf(sign), nil + } + if isPercentage { + exp -= 2 + } + atmax := false + for coef > coefMax { + coef = (coef + 5) / 10 + exp++ + atmax = true + } + + if !atmax { + p := maxShift(coef) + coef *= pow10[p] + exp -= p + } + //check(coefMin <= coef && coef <= coefMax) + return Value{coef, sign, exp}, nil +} + +func MustNewFromBytes(input []byte) Value { + v, err := NewFromBytes(input) + if err != nil { + panic(fmt.Errorf("cannot parse %s into fixedpoint, error: %s", input, err.Error())) + } + return v +} + +// TODO: refactor by interface + +type readerBytes struct { + s []byte + i int +} + +func (r *readerBytes) cur() byte { + if r.i >= len(r.s) { + return 0 + } + return byte(r.s[r.i]) +} + +func (r *readerBytes) prev() byte { + if r.i == 0 { + return 0 + } + return byte(r.s[r.i-1]) +} + +func (r *readerBytes) len() int { + return len(r.s) - r.i +} + +func (r *readerBytes) match(c byte) bool { + if r.cur() == c { + r.i++ + return true + } + return false +} + +func (r *readerBytes) matchDigit() bool { + c := r.cur() + if '0' <= c && c <= '9' { + r.i++ + return true + } + return false +} + +func (r *readerBytes) matchStr(pre string) bool { + for i, c := range r.s[r.i:] { + if pre[i] != c { + return false + } + } + r.i += len(pre) + return true +} + +func (r *readerBytes) getSign() int8 { + if r.match('-') { + return int8(signNeg) + } + r.match('+') + return int8(signPos) +} + +func (r *readerBytes) getCoef() (uint64, int) { + digits := false + beforeDecimal := true + for r.match('0') { + digits = true + } + if r.cur() == '.' 
&& r.len() > 1 { + digits = false + } + n := uint64(0) + exp := 0 + p := shiftMax + for { + c := r.cur() + if r.matchDigit() { + digits = true + // ignore extra decimal places + if c != '0' && p >= 0 { + n += uint64(c-'0') * pow10[p] + } + p-- + } else if beforeDecimal { + // decimal point or end + exp = shiftMax - p + if !r.match('.') { + break + } + beforeDecimal = false + if !digits { + for r.match('0') { + digits = true + exp-- + } + } + } else { + break + } + } + if !digits { + panic("numbers require at least one digit") + } + return n, exp +} + +func (r *readerBytes) getExp() int { + e := 0 + if r.match('e') || r.match('E') { + esign := r.getSign() + for r.matchDigit() { + e = e*10 + int(r.prev()-'0') + } + e *= int(esign) + } + return e +} + +type reader struct { + s string + i int +} + +func (r *reader) cur() byte { + if r.i >= len(r.s) { + return 0 + } + return byte(r.s[r.i]) +} + +func (r *reader) prev() byte { + if r.i == 0 { + return 0 + } + return byte(r.s[r.i-1]) +} + +func (r *reader) len() int { + return len(r.s) - r.i +} + +func (r *reader) match(c byte) bool { + if r.cur() == c { + r.i++ + return true + } + return false +} + +func (r *reader) matchDigit() bool { + c := r.cur() + if '0' <= c && c <= '9' { + r.i++ + return true + } + return false +} + +func (r *reader) matchStr(pre string) bool { + if strings.HasPrefix(r.s[r.i:], pre) { + r.i += len(pre) + return true + } + return false +} + +func (r *reader) getSign() int8 { + if r.match('-') { + return int8(signNeg) + } + r.match('+') + return int8(signPos) +} + +func (r *reader) getCoef() (uint64, int) { + digits := false + beforeDecimal := true + for r.match('0') { + digits = true + } + if r.cur() == '.' && r.len() > 1 { + digits = false + } + n := uint64(0) + exp := 0 + p := shiftMax + for { + c := r.cur() + if r.matchDigit() { + digits = true + // ignore extra decimal places + if c != '0' && p >= 0 { + n += uint64(c-'0') * pow10[p] + } + p-- + } else if beforeDecimal { + // decimal point or end + exp = shiftMax - p + if !r.match('.') { + break + } + beforeDecimal = false + if !digits { + for r.match('0') { + digits = true + exp-- + } + } + } else { + break + } + } + if !digits { + panic("numbers require at least one digit") + } + return n, exp +} + +func (r *reader) getExp() int { + e := 0 + if r.match('e') || r.match('E') { + esign := r.getSign() + for r.matchDigit() { + e = e*10 + int(r.prev()-'0') + } + e *= int(esign) + } + return e +} + +// end of FromStr --------------------------------------------------- + +// IsInf returns true if a Value is positive or negative infinite +func (dn Value) IsInf() bool { + return dn.sign == signPosInf || dn.sign == signNegInf +} + +// IsZero returns true if a Value is zero +func (dn Value) IsZero() bool { + return dn.sign == signZero +} + +// Float64 converts a Value to float64 +func (dn Value) Float64() float64 { + if dn.IsInf() { + return math.Inf(int(dn.sign)) + } + g := float64(dn.coef) + if dn.sign == signNeg { + g = -g + } + i := int(dn.exp) - digitsMax + return g * math.Pow(10, float64(i)) +} + +// Int64 converts a Value to an int64, returning whether it was convertible +func (dn Value) Int64() int64 { + if dn.sign == 0 { + return 0 + } + if dn.sign != signNegInf && dn.sign != signPosInf { + if 0 < dn.exp && dn.exp < digitsMax { + return int64(dn.sign) * int64(dn.coef/pow10[digitsMax-dn.exp]) + } else if dn.exp <= 0 && dn.coef != 0 { + result := math.Log10(float64(dn.coef)) - float64(digitsMax) + float64(dn.exp) + return int64(dn.sign) * int64(math.Pow(10, result)) + } + 
if dn.exp == digitsMax { + return int64(dn.sign) * int64(dn.coef) + } + if dn.exp == digitsMax+1 { + return int64(dn.sign) * (int64(dn.coef) * 10) + } + if dn.exp == digitsMax+2 { + return int64(dn.sign) * (int64(dn.coef) * 100) + } + if dn.exp == digitsMax+3 && dn.coef < math.MaxInt64/1000 { + return int64(dn.sign) * (int64(dn.coef) * 1000) + } + } + panic("unable to convert Value to int64") +} + +func (dn Value) Int() int { + // if int is int64, this is a nop + n := dn.Int64() + if int64(int(n)) != n { + panic("unable to convert Value to int32") + } + return int(n) +} + +// Sign returns -1 for negative, 0 for zero, and +1 for positive +func (dn Value) Sign() int { + return int(dn.sign) +} + +// Coef returns the coefficient +func (dn Value) Coef() uint64 { + return dn.coef +} + +// Exp returns the exponent +func (dn Value) Exp() int { + return int(dn.exp) +} + +// Frac returns the fractional portion, i.e. x - x.Int() +func (dn Value) Frac() Value { + if dn.sign == 0 || dn.sign == signNegInf || dn.sign == signPosInf || + dn.exp >= digitsMax { + return Zero + } + if dn.exp <= 0 { + return dn + } + frac := dn.coef % pow10[digitsMax-dn.exp] + if frac == dn.coef { + return dn + } + return New(dn.sign, frac, int(dn.exp)) +} + +type RoundingMode int + +const ( + Up RoundingMode = iota + Down + HalfUp +) + +// Trunc returns the integer portion (truncating any fractional part) +func (dn Value) Trunc() Value { + return dn.integer(Down) +} + +func (dn Value) integer(mode RoundingMode) Value { + if dn.sign == 0 || dn.sign == signNegInf || dn.sign == signPosInf || + dn.exp >= digitsMax { + return dn + } + if dn.exp <= 0 { + if mode == Up || + (mode == HalfUp && dn.exp == 0 && dn.coef >= One.coef*5) { + return New(dn.sign, One.coef, int(dn.exp)+1) + } + return Zero + } + e := digitsMax - dn.exp + frac := dn.coef % pow10[e] + if frac == 0 { + return dn + } + i := dn.coef - frac + if (mode == Up && frac > 0) || (mode == HalfUp && frac >= halfpow10[e]) { + return New(dn.sign, i+pow10[e], int(dn.exp)) // normalize + } + return Value{i, dn.sign, dn.exp} +} + +func (dn Value) Round(r int, mode RoundingMode) Value { + if dn.sign == 0 || dn.sign == signNegInf || dn.sign == signPosInf || + r >= digitsMax { + return dn + } + if r <= -digitsMax { + return Zero + } + n := New(dn.sign, dn.coef, int(dn.exp)+r) // multiply by 10^r + n = n.integer(mode) + if n.sign == signPos || n.sign == signNeg { // i.e. not zero or inf + return New(n.sign, n.coef, int(n.exp)-r) + } + return n +} + +// arithmetic operations ------------------------------------------------------- + +// Neg returns the Value negated i.e. 
sign reversed +func (dn Value) Neg() Value { + return Value{dn.coef, -dn.sign, dn.exp} +} + +// Abs returns the Value with a positive sign +func (dn Value) Abs() Value { + if dn.sign < 0 { + return Value{dn.coef, -dn.sign, dn.exp} + } + return dn +} + +// Equal returns true if two Value's are equal +func Equal(x, y Value) bool { + return x.sign == y.sign && x.exp == y.exp && x.coef == y.coef +} + +func (x Value) Eq(y Value) bool { + return Equal(x, y) +} + +func Max(x, y Value) Value { + if Compare(x, y) > 0 { + return x + } + return y +} + +func Min(x, y Value) Value { + if Compare(x, y) < 0 { + return x + } + return y +} + +// Compare compares two Value's returning -1 for <, 0 for ==, +1 for > +func Compare(x, y Value) int { + switch { + case x.sign < y.sign: + return -1 + case x.sign > y.sign: + return 1 + case x == y: + return 0 + } + sign := int(x.sign) + switch { + case sign == 0 || sign == signNegInf || sign == signPosInf: + return 0 + case x.exp < y.exp: + return -sign + case x.exp > y.exp: + return +sign + case x.coef < y.coef: + return -sign + case x.coef > y.coef: + return +sign + default: + return 0 + } +} + +func (x Value) Compare(y Value) int { + return Compare(x, y) +} + +func (v *Value) UnmarshalYAML(unmarshal func(a interface{}) error) (err error) { + var f float64 + if err = unmarshal(&f); err == nil { + *v = NewFromFloat(f) + return + } + var i int64 + if err = unmarshal(&i); err == nil { + *v = NewFromInt(i) + return + } + + var s string + if err = unmarshal(&s); err == nil { + nv, err2 := NewFromString(s) + if err2 == nil { + *v = nv + return + } + } + return err +} + +// FIXME: should we limit to 8 prec? +func (v Value) MarshalJSON() ([]byte, error) { + return []byte(v.FormatString(8)), nil +} + +func (v *Value) UnmarshalJSON(data []byte) error { + // FIXME: do we need to compare {}, [], "", or "null"? 
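+	// A JSON null or an empty payload decodes to Zero, and quoted numbers such
+	// as "1.23" are accepted by stripping the surrounding quotes before parsing.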
+ if bytes.Compare(data, []byte{'n', 'u', 'l', 'l'}) == 0 { + *v = Zero + return nil + } + if len(data) == 0 { + *v = Zero + return nil + } + var err error + if data[0] == '"' { + data = data[1 : len(data)-1] + } + if *v, err = NewFromBytes(data); err != nil { + return err + } + return nil +} + +func Must(v Value, err error) Value { + if err != nil { + panic(err) + } + return v +} + +// v * 10^(exp) +func (v Value) MulExp(exp int) Value { + return Value{v.coef, v.sign, v.exp + exp} +} + +// Sub returns the difference of two Value's +func Sub(x, y Value) Value { + return Add(x, y.Neg()) +} + +func (x Value) Sub(y Value) Value { + return Sub(x, y) +} + +// Add returns the sum of two Value's +func Add(x, y Value) Value { + switch { + case x.sign == signZero: + return y + case y.sign == signZero: + return x + case x.IsInf(): + if y.sign == -x.sign { + return Zero + } + return x + case y.IsInf(): + return y + } + if !align(&x, &y) { + return x + } + if x.sign != y.sign { + return usub(x, y) + } + return uadd(x, y) +} + +func (x Value) Add(y Value) Value { + return Add(x, y) +} + +func uadd(x, y Value) Value { + return New(x.sign, x.coef+y.coef, int(x.exp)) +} + +func usub(x, y Value) Value { + if x.coef < y.coef { + return New(-x.sign, y.coef-x.coef, int(x.exp)) + } + return New(x.sign, x.coef-y.coef, int(x.exp)) +} + +func align(x, y *Value) bool { + if x.exp == y.exp { + return true + } + if x.exp < y.exp { + *x, *y = *y, *x // swap + } + yshift := ilog10(y.coef) + e := int(x.exp - y.exp) + if e > yshift { + return false + } + yshift = e + //check(0 <= yshift && yshift <= 20) + y.coef = (y.coef + halfpow10[yshift]) / pow10[yshift] + //check(int(y.exp)+yshift == int(x.exp)) + return true +} + +const e7 = 10000000 + +// Mul returns the product of two Value's +func Mul(x, y Value) Value { + sign := x.sign * y.sign + switch { + case sign == signZero: + return Zero + case x.IsInf() || y.IsInf(): + return Inf(sign) + } + e := int(x.exp) + int(y.exp) + + // split unevenly to use full 64 bit range to get more precision + // and avoid needing xlo * ylo + xhi := x.coef / e7 // 9 digits + xlo := x.coef % e7 // 7 digits + yhi := y.coef / e7 // 9 digits + ylo := y.coef % e7 // 7 digits + + c := xhi * yhi + if (xlo | ylo) != 0 { + c += (xlo*yhi + ylo*xhi) / e7 + } + return New(sign, c, e-2) +} + +func (x Value) Mul(y Value) Value { + return Mul(x, y) +} + +// Div returns the quotient of two Value's +func Div(x, y Value) Value { + sign := x.sign * y.sign + switch { + case x.sign == signZero: + return x + case y.sign == signZero: + return Inf(x.sign) + case x.IsInf(): + if y.IsInf() { + if sign < 0 { + return NegOne + } + return One + } + return Inf(sign) + case y.IsInf(): + return Zero + } + coef := div128(x.coef, y.coef) + return New(sign, coef, int(x.exp)-int(y.exp)) +} + +func (x Value) Div(y Value) Value { + return Div(x, y) +} + +// Hash returns a hash value for a Value +func (dn Value) Hash() uint32 { + return uint32(dn.coef>>32) ^ uint32(dn.coef) ^ + uint32(dn.sign)<<16 ^ uint32(dn.exp)<<8 +} + +// Format converts a number to a string with a specified format +func (dn Value) Format(mask string) string { + if dn.IsInf() { + return "#" + } + n := dn + before := 0 + after := 0 + intpart := true + for _, mc := range mask { + switch mc { + case '.': + intpart = false + case '#': + if intpart { + before++ + } else { + after++ + } + } + } + if before+after == 0 || n.Exp() > before { + return "#" // too big to fit in mask + } + n = n.Round(after, HalfUp) + e := n.Exp() + var digits []byte + if n.IsZero() && 
after == 0 { + digits = []byte("0") + e = 1 + } else { + digits = strconv.AppendUint(make([]byte, 0, digitsMax), n.Coef(), 10) + digits = bytes.TrimRight(digits, "0") + } + nd := len(digits) + + di := e - before + //check(di <= 0) + var buf strings.Builder + sign := n.Sign() + signok := (sign >= 0) + frac := false + for _, mc := range []byte(mask) { + switch mc { + case '#': + if 0 <= di && di < nd { + buf.WriteByte(digits[di]) + } else if frac || di >= 0 { + buf.WriteByte('0') + } + di++ + case ',': + if di > 0 { + buf.WriteByte(',') + } + case '-', '(': + signok = true + if sign < 0 { + buf.WriteByte(mc) + } + case ')': + if sign < 0 { + buf.WriteByte(mc) + } else { + buf.WriteByte(' ') + } + case '.': + frac = true + fallthrough + default: + buf.WriteByte(mc) + } + } + if !signok { + return "-" // negative not handled by mask + } + return buf.String() +} diff --git a/pkg/fixedpoint/dec_dnum_test.go b/pkg/fixedpoint/dec_dnum_test.go new file mode 100644 index 0000000000..d92d496397 --- /dev/null +++ b/pkg/fixedpoint/dec_dnum_test.go @@ -0,0 +1,37 @@ +//go:build dnum + +package fixedpoint + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestDelta(t *testing.T) { + f1 := MustNewFromString("0.0009763593380614657") + f2 := NewFromInt(42300) + assert.InDelta(t, f1.Mul(f2).Float64(), 41.3, 1e-14) +} + +func TestInternal(t *testing.T) { + r := &reader{"1.1e-15", 0} + c, e := r.getCoef() + assert.Equal(t, uint64(1100000000000000), c) + assert.Equal(t, 1, e) + f := MustNewFromString("1.1e-15") + digits := getDigits(f.coef) + assert.Equal(t, "11", digits) + f = MustNewFromString("1.00000000000000111") + assert.Equal(t, "1.000000000000001", f.String()) + f = MustNewFromString("1.1e-15") + assert.Equal(t, "0.0000000000000011", f.String()) + assert.Equal(t, 16, f.NumFractionalDigits()) + f = MustNewFromString("1.00000000000000111") + assert.Equal(t, "1.000000000000001", f.String()) + f = MustNewFromString("0.00000000000000000001000111") + assert.Equal(t, "0.00000000000000000001000111", f.String()) + f = MustNewFromString("0.000000000000000000001000111") + assert.Equal(t, "1.000111e-21", f.String()) + f = MustNewFromString("1e-100") + assert.Equal(t, 100, f.NumFractionalDigits()) +} diff --git a/pkg/fixedpoint/dec_legacy_test.go b/pkg/fixedpoint/dec_legacy_test.go new file mode 100644 index 0000000000..848b77ec68 --- /dev/null +++ b/pkg/fixedpoint/dec_legacy_test.go @@ -0,0 +1,33 @@ +//go:build !dnum + +package fixedpoint + +import ( + "testing" +) + +func TestNumFractionalDigitsLegacy(t *testing.T) { + tests := []struct { + name string + v Value + want int + }{ + { + name: "over the default precision", + v: MustNewFromString("0.123456789"), + want: 8, + }, + { + name: "zero underflow", + v: MustNewFromString("1e-100"), + want: 0, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.v.NumFractionalDigits(); got != tt.want { + t.Errorf("NumFractionalDigitsLegacy() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/fixedpoint/dec_test.go b/pkg/fixedpoint/dec_test.go new file mode 100644 index 0000000000..3937a72bde --- /dev/null +++ b/pkg/fixedpoint/dec_test.go @@ -0,0 +1,241 @@ +package fixedpoint + +import ( + "encoding/json" + "github.com/stretchr/testify/assert" + "math/big" + "testing" +) + +const Delta = 1e-9 + +func BenchmarkMul(b *testing.B) { + b.ResetTimer() + + b.Run("mul-float64", func(b *testing.B) { + for i := 0; i < b.N; i++ { + x := NewFromFloat(20.0) + y := NewFromFloat(20.0) + x = x.Mul(y) // nolint + } + }) + + 
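+	// the remaining sub-benchmarks repeat the multiplication with more
+	// significant digits and against math/big.Float as a comparison baseline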
b.Run("mul-float64-large-numbers", func(b *testing.B) { + for i := 0; i < b.N; i++ { + x := NewFromFloat(88.12345678) + y := NewFromFloat(88.12345678) + x = x.Mul(y) // nolint + } + }) + + b.Run("mul-big-small-numbers", func(b *testing.B) { + for i := 0; i < b.N; i++ { + x := big.NewFloat(20.0) + y := big.NewFloat(20.0) + x = new(big.Float).Mul(x, y) // nolint + } + }) + + b.Run("mul-big-large-numbers", func(b *testing.B) { + for i := 0; i < b.N; i++ { + x := big.NewFloat(88.12345678) + y := big.NewFloat(88.12345678) + x = new(big.Float).Mul(x, y) // nolint + } + }) +} + +func TestMulString(t *testing.T) { + x := NewFromFloat(10.55) + assert.Equal(t, "10.55", x.String()) + y := NewFromFloat(10.55) + x = x.Mul(y) + assert.Equal(t, "111.3025", x.String()) + assert.Equal(t, "111.30", x.FormatString(2)) + assert.InDelta(t, 111.3025, x.Float64(), Delta) +} + +func TestMulExp(t *testing.T) { + x, _ := NewFromString("166") + digits := x.NumIntDigits() + assert.Equal(t, digits, 3) + step := x.MulExp(-digits + 1) + assert.Equal(t, "1.66", step.String()) + step = x.MulPow(NewFromInt(10), NewFromInt(int64(-digits+1))) + assert.Equal(t, "1.66", step.String()) +} + +func TestNew(t *testing.T) { + f := NewFromFloat(0.001) + assert.Equal(t, "0.001", f.String()) + assert.Equal(t, "0.0010", f.FormatString(4)) + assert.Equal(t, "0.1%", f.Percentage()) + assert.Equal(t, "0.10%", f.FormatPercentage(2)) + f = NewFromFloat(0.1) + assert.Equal(t, "10%", f.Percentage()) + assert.Equal(t, "10%", f.FormatPercentage(0)) + f = NewFromFloat(0.01) + assert.Equal(t, "1%", f.Percentage()) + assert.Equal(t, "1%", f.FormatPercentage(0)) + f = NewFromFloat(0.111) + assert.Equal(t, "11.1%", f.Percentage()) + assert.Equal(t, "11.1%", f.FormatPercentage(1)) +} + +func TestFormatString(t *testing.T) { + testCases := []struct { + value Value + prec int + out string + }{ + { + value: NewFromFloat(0.001), + prec: 8, + out: "0.00100000", + }, + { + value: NewFromFloat(0.123456789), + prec: 4, + out: "0.1234", + }, + { + value: NewFromFloat(0.123456789), + prec: 5, + out: "0.12345", + }, + { + value: NewFromFloat(20.0), + prec: 0, + out: "20", + }, + } + for _, testCase := range testCases { + assert.Equal(t, testCase.out, testCase.value.FormatString(testCase.prec)) + } +} + +func TestRound(t *testing.T) { + f := NewFromFloat(1.2345) + f = f.Round(0, Down) + assert.Equal(t, "1", f.String()) + w := NewFromFloat(1.2345) + w = w.Trunc() + assert.Equal(t, "1", w.String()) + s := NewFromFloat(1.2345) + assert.Equal(t, "1.23", s.Round(2, Down).String()) +} + +func TestFromString(t *testing.T) { + f := MustNewFromString("0.004075") + assert.Equal(t, "0.004075", f.String()) + f = MustNewFromString("0.03") + assert.Equal(t, "0.03", f.String()) + + f = MustNewFromString("0.75%") + assert.Equal(t, "0.0075", f.String()) + f = MustNewFromString("1.1e-7") + assert.Equal(t, "0.00000011", f.String()) + f = MustNewFromString(".0%") + assert.Equal(t, Zero, f) + f = MustNewFromString("") + assert.Equal(t, Zero, f) +} + +func TestJson(t *testing.T) { + p := MustNewFromString("0") + e, err := json.Marshal(p) + assert.NoError(t, err) + assert.Equal(t, "0.00000000", string(e)) + p = MustNewFromString("1.00000003") + e, err = json.Marshal(p) + assert.NoError(t, err) + assert.Equal(t, "1.00000003", string(e)) + p = MustNewFromString("1.000000003") + e, err = json.Marshal(p) + assert.NoError(t, err) + assert.Equal(t, "1.00000000", string(e)) + p = MustNewFromString("1.000000008") + e, err = json.Marshal(p) + assert.NoError(t, err) + assert.Equal(t, 
"1.00000000", string(e)) + p = MustNewFromString("0.999999999") + e, err = json.Marshal(p) + assert.NoError(t, err) + assert.Equal(t, "0.99999999", string(e)) + + p = MustNewFromString("1.2e-9") + e, err = json.Marshal(p) + assert.NoError(t, err) + assert.Equal(t, "0.00000000", p.FormatString(8)) + assert.Equal(t, "0.00000000", string(e)) + + _ = json.Unmarshal([]byte("0.00153917575"), &p) + assert.Equal(t, "0.00153917", p.FormatString(8)) + + q := NewFromFloat(0.00153917575) + assert.Equal(t, p, q) + _ = json.Unmarshal([]byte("6e-8"), &p) + _ = json.Unmarshal([]byte("0.000062"), &q) + assert.Equal(t, "0.00006194", q.Sub(p).String()) +} + +func TestNumFractionalDigits(t *testing.T) { + tests := []struct { + name string + v Value + want int + }{ + { + name: "ignore the integer part", + v: MustNewFromString("123.4567"), + want: 4, + }, + { + name: "ignore the sign", + v: MustNewFromString("-123.4567"), + want: 4, + }, + { + name: "ignore the trailing zero", + v: MustNewFromString("-123.45000000"), + want: 2, + }, + { + name: "no fractional parts", + v: MustNewFromString("-1"), + want: 0, + }, + { + name: "no fractional parts", + v: MustNewFromString("-1.0"), + want: 0, + }, + { + name: "only fractional part", + v: MustNewFromString(".123456"), + want: 6, + }, + { + name: "percentage", + v: MustNewFromString("0.075%"), // 0.075 * 0.01 + want: 5, + }, + { + name: "scientific notation", + v: MustNewFromString("1.1e-7"), + want: 8, + }, + { + name: "zero", + v: MustNewFromString("0"), + want: 0, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.v.NumFractionalDigits(); got != tt.want { + t.Errorf("NumFractionalDigits() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/fixedpoint/div128.go b/pkg/fixedpoint/div128.go new file mode 100644 index 0000000000..21d84c5c35 --- /dev/null +++ b/pkg/fixedpoint/div128.go @@ -0,0 +1,134 @@ +//go:build dnum +// +build dnum + +// Copyright Suneido Software Corp. All rights reserved. +// Governed by the MIT license found in the LICENSE file. 
+ +package fixedpoint + +import ( + "math/bits" +) + +const ( + e16 = 1_0000_0000_0000_0000 + longMask = 0xffffffff + divNumBase = 1 << 32 + e16Hi = e16 >> 32 + e16Lo = e16 & longMask +) + +// returns (1e16 * dividend) / divisor +// Used by dnum divide +// Based on cSuneido code +// which is based on jSuneido code +// which is based on Java BigDecimal code +// which is based on Hacker's Delight and Knuth TAoCP Vol 2 +// A bit simpler with unsigned types +func div128(dividend, divisor uint64) uint64 { + //check(dividend != 0) + //check(divisor != 0) + // multiply dividend * e16 + d1Hi := dividend >> 32 + d1Lo := dividend & longMask + product := uint64(e16Lo) * d1Lo + d0 := product & longMask + d1 := product >> 32 + product = uint64(e16Hi)*d1Lo + d1 + d1 = product & longMask + d2 := product >> 32 + product = uint64(e16Lo)*d1Hi + d1 + d1 = product & longMask + d2 += product >> 32 + d3 := d2 >> 32 + d2 &= longMask + product = e16Hi*d1Hi + d2 + d2 = product & longMask + d3 = ((product >> 32) + d3) & longMask + dividendHi := make64(uint32(d3), uint32(d2)) + dividendLo := make64(uint32(d1), uint32(d0)) + // divide + return divide128(dividendHi, dividendLo, divisor) +} + +func divide128(dividendHi, dividendLo, divisor uint64) uint64 { + // so we can shift dividend as much as divisor + // don't allow equals to avoid quotient overflow (by 1) + //check(dividendHi < divisor) + + // maximize divisor (bit wise), since we're mostly using the top half + shift := uint(bits.LeadingZeros64(divisor)) + divisor = divisor << shift + + // split divisor + v1 := divisor >> 32 + v0 := divisor & longMask + + // matching shift + dls := dividendLo << shift + // split dividendLo + u1 := uint32(dls >> 32) + u0 := uint32(dls & longMask) + + // tmp1 = top 64 of dividend << shift + tmp1 := (dividendHi << shift) | (dividendLo >> (64 - shift)) + var q1, rtmp1 uint64 + if v1 == 1 { + q1 = tmp1 + rtmp1 = 0 + } else { + //check(tmp1 >= 0) + q1 = tmp1 / v1 // DIVIDE top 64 / top 32 + rtmp1 = tmp1 % v1 // remainder + } + + // adjust if quotient estimate too large + //check(q1 < divNumBase) + for q1*v0 > make64(uint32(rtmp1), u1) { + // done about 5.5 per 10,000 divides + q1-- + rtmp1 += v1 + if rtmp1 >= divNumBase { + break + } + } + //check(q1 >= 0) + u2 := tmp1 & longMask // low half + + // u2,u1 is the MIDDLE 64 bits of the dividend + tmp2 := mulsub(uint32(u2), uint32(u1), uint32(v1), uint32(v0), q1) + var q0, rtmp2 uint64 + if v1 == 1 { + q0 = tmp2 + rtmp2 = 0 + } else { + q0 = tmp2 / v1 // DIVIDE dividend remainder 64 / divisor high 32 + rtmp2 = tmp2 % v1 + } + + // adjust if quotient estimate too large + //check(q0 < divNumBase) + for q0*v0 > make64(uint32(rtmp2), u0) { + // done about .33 times per divide + q0-- + rtmp2 += v1 + if rtmp2 >= divNumBase { + break + } + //check(q0 < divNumBase) + } + + //check(q1 <= math.MaxUint32) + //check(q0 <= math.MaxUint32) + return make64(uint32(q1), uint32(q0)) +} + +// mulsub returns u1,u0 - v1,v0 * q0 +func mulsub(u1, u0, v1, v0 uint32, q0 uint64) uint64 { + tmp := uint64(u0) - q0*uint64(v0) + return make64(u1+uint32(tmp>>32)-uint32(q0*uint64(v1)), uint32(tmp&longMask)) +} + +func make64(hi, lo uint32) uint64 { + return uint64(hi)<<32 | uint64(lo) +} diff --git a/pkg/grpc/convert.go b/pkg/grpc/convert.go new file mode 100644 index 0000000000..928cbd4499 --- /dev/null +++ b/pkg/grpc/convert.go @@ -0,0 +1,237 @@ +package grpc + +import ( + "fmt" + "strconv" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + 
"github.com/c9s/bbgo/pkg/pb" + "github.com/c9s/bbgo/pkg/types" +) + +func toSubscriptions(sub *pb.Subscription) (types.Subscription, error) { + switch sub.Channel { + case pb.Channel_TRADE: + return types.Subscription{ + Symbol: sub.Symbol, + Channel: types.MarketTradeChannel, + }, nil + + case pb.Channel_BOOK: + return types.Subscription{ + Symbol: sub.Symbol, + Channel: types.BookChannel, + Options: types.SubscribeOptions{ + Depth: types.Depth(sub.Depth), + }, + }, nil + + case pb.Channel_KLINE: + return types.Subscription{ + Symbol: sub.Symbol, + Channel: types.KLineChannel, + Options: types.SubscribeOptions{ + Interval: types.Interval(sub.Interval), + }, + }, nil + } + + return types.Subscription{}, fmt.Errorf("unsupported subscription channel: %s", sub.Channel) +} + +func transPriceVolume(srcPvs types.PriceVolumeSlice) (pvs []*pb.PriceVolume) { + for _, srcPv := range srcPvs { + pvs = append(pvs, &pb.PriceVolume{ + Price: srcPv.Price.String(), + Volume: srcPv.Volume.String(), + }) + } + return pvs +} + +func transBook(session *bbgo.ExchangeSession, book types.SliceOrderBook, event pb.Event) *pb.MarketData { + return &pb.MarketData{ + Session: session.Name, + Exchange: session.ExchangeName.String(), + Symbol: book.Symbol, + Channel: pb.Channel_BOOK, + Event: event, + Depth: &pb.Depth{ + Exchange: session.ExchangeName.String(), + Symbol: book.Symbol, + Asks: transPriceVolume(book.Asks), + Bids: transPriceVolume(book.Bids), + }, + } +} + +func toOrderType(orderType pb.OrderType) types.OrderType { + switch orderType { + case pb.OrderType_MARKET: + return types.OrderTypeMarket + case pb.OrderType_LIMIT: + return types.OrderTypeLimit + + } + + log.Warnf("unexpected order type: %v", orderType) + return types.OrderTypeLimit +} + +func toSide(side pb.Side) types.SideType { + switch side { + case pb.Side_BUY: + return types.SideTypeBuy + case pb.Side_SELL: + return types.SideTypeSell + + } + + log.Warnf("unexpected side type: %v", side) + return types.SideTypeBuy +} + +func toSubmitOrders(pbOrders []*pb.SubmitOrder) (submitOrders []types.SubmitOrder) { + for _, pbOrder := range pbOrders { + submitOrders = append(submitOrders, types.SubmitOrder{ + ClientOrderID: pbOrder.ClientOrderId, + Symbol: pbOrder.Symbol, + Side: toSide(pbOrder.Side), + Type: toOrderType(pbOrder.OrderType), + Price: fixedpoint.MustNewFromString(pbOrder.Price), + Quantity: fixedpoint.MustNewFromString(pbOrder.Quantity), + StopPrice: fixedpoint.MustNewFromString(pbOrder.StopPrice), + TimeInForce: "", + }) + } + + return submitOrders +} + +func transBalances(session *bbgo.ExchangeSession, balances types.BalanceMap) (pbBalances []*pb.Balance) { + for _, b := range balances { + pbBalances = append(pbBalances, &pb.Balance{ + Exchange: session.ExchangeName.String(), + Currency: b.Currency, + Available: b.Available.String(), + Locked: b.Locked.String(), + }) + } + return pbBalances +} + +func transTrade(session *bbgo.ExchangeSession, trade types.Trade) *pb.Trade { + return &pb.Trade{ + Session: session.Name, + Exchange: trade.Exchange.String(), + Symbol: trade.Symbol, + Id: strconv.FormatUint(trade.ID, 10), + Price: trade.Price.String(), + Quantity: trade.Quantity.String(), + CreatedAt: trade.Time.UnixMilli(), + Side: transSide(trade.Side), + FeeCurrency: trade.FeeCurrency, + Fee: trade.Fee.String(), + Maker: trade.IsMaker, + } +} + +func transMarketTrade(session *bbgo.ExchangeSession, marketTrade types.Trade) *pb.MarketData { + return &pb.MarketData{ + Session: session.Name, + Exchange: session.ExchangeName.String(), + Symbol: 
marketTrade.Symbol, + Channel: pb.Channel_TRADE, + Event: pb.Event_UPDATE, + Trades: []*pb.Trade{ + { + Exchange: marketTrade.Exchange.String(), + Symbol: marketTrade.Symbol, + Id: strconv.FormatUint(marketTrade.ID, 10), + Price: marketTrade.Price.String(), + Quantity: marketTrade.Quantity.String(), + CreatedAt: marketTrade.Time.UnixMilli(), + Side: transSide(marketTrade.Side), + FeeCurrency: marketTrade.FeeCurrency, + Fee: marketTrade.Fee.String(), + Maker: marketTrade.IsMaker, + }, + }, + } +} + +func transSide(side types.SideType) pb.Side { + switch side { + case types.SideTypeBuy: + return pb.Side_BUY + case types.SideTypeSell: + return pb.Side_SELL + } + + return pb.Side_SELL +} + +func transOrderType(orderType types.OrderType) pb.OrderType { + switch orderType { + case types.OrderTypeLimit: + return pb.OrderType_LIMIT + case types.OrderTypeMarket: + return pb.OrderType_MARKET + case types.OrderTypeStopLimit: + return pb.OrderType_STOP_LIMIT + case types.OrderTypeStopMarket: + return pb.OrderType_STOP_MARKET + } + + return pb.OrderType_LIMIT +} + +func transOrder(session *bbgo.ExchangeSession, order types.Order) *pb.Order { + return &pb.Order{ + Exchange: order.Exchange.String(), + Symbol: order.Symbol, + Id: strconv.FormatUint(order.OrderID, 10), + Side: transSide(order.Side), + OrderType: transOrderType(order.Type), + Price: order.Price.String(), + StopPrice: order.StopPrice.String(), + Status: string(order.Status), + CreatedAt: order.CreationTime.UnixMilli(), + Quantity: order.Quantity.String(), + ExecutedQuantity: order.ExecutedQuantity.String(), + ClientOrderId: order.ClientOrderID, + GroupId: int64(order.GroupID), + } +} + +func transKLine(session *bbgo.ExchangeSession, kline types.KLine) *pb.KLine { + return &pb.KLine{ + Session: session.Name, + Exchange: kline.Exchange.String(), + Symbol: kline.Symbol, + Open: kline.Open.String(), + High: kline.High.String(), + Low: kline.Low.String(), + Close: kline.Close.String(), + Volume: kline.Volume.String(), + QuoteVolume: kline.QuoteVolume.String(), + StartTime: kline.StartTime.UnixMilli(), + EndTime: kline.StartTime.UnixMilli(), + Closed: kline.Closed, + } +} + +func transKLineResponse(session *bbgo.ExchangeSession, kline types.KLine) *pb.MarketData { + return &pb.MarketData{ + Session: session.Name, + Exchange: kline.Exchange.String(), + Symbol: kline.Symbol, + Channel: pb.Channel_KLINE, + Event: pb.Event_UPDATE, + Kline: transKLine(session, kline), + SubscribedAt: 0, + } +} diff --git a/pkg/grpc/server.go b/pkg/grpc/server.go new file mode 100644 index 0000000000..ad6df87993 --- /dev/null +++ b/pkg/grpc/server.go @@ -0,0 +1,355 @@ +package grpc + +import ( + "context" + "fmt" + "net" + "strconv" + "time" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "google.golang.org/grpc" + "google.golang.org/grpc/reflection" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/pb" + "github.com/c9s/bbgo/pkg/types" +) + +type TradingService struct { + Config *bbgo.Config + Environ *bbgo.Environment + Trader *bbgo.Trader + + pb.UnimplementedTradingServiceServer +} + +func (s *TradingService) SubmitOrder(ctx context.Context, request *pb.SubmitOrderRequest) (*pb.SubmitOrderResponse, error) { + sessionName := request.Session + + if len(sessionName) == 0 { + return nil, fmt.Errorf("session name can not be empty") + } + + session, ok := s.Environ.Session(sessionName) + if !ok { + return nil, fmt.Errorf("session %s not found", sessionName) + } + + submitOrders := toSubmitOrders(request.SubmitOrders) + for i := range 
submitOrders { + if market, ok := session.Market(submitOrders[i].Symbol); ok { + submitOrders[i].Market = market + } else { + log.Warnf("session %s market %s not found", sessionName, submitOrders[i].Symbol) + } + } + + createdOrders, err := session.Exchange.SubmitOrders(ctx, submitOrders...) + if err != nil { + return nil, err + } + + resp := &pb.SubmitOrderResponse{ + Session: sessionName, + Orders: nil, + } + for _, createdOrder := range createdOrders { + resp.Orders = append(resp.Orders, transOrder(session, createdOrder)) + } + + return resp, nil +} + +func (s *TradingService) CancelOrder(ctx context.Context, request *pb.CancelOrderRequest) (*pb.CancelOrderResponse, error) { + sessionName := request.Session + + if len(sessionName) == 0 { + return nil, fmt.Errorf("session name can not be empty") + } + + session, ok := s.Environ.Session(sessionName) + if !ok { + return nil, fmt.Errorf("session %s not found", sessionName) + } + + uuidOrderID := "" + orderID, err := strconv.ParseUint(request.OrderId, 10, 64) + if err != nil { + // TODO: validate uuid + uuidOrderID = request.OrderId + } + + session.Exchange.CancelOrders(ctx, types.Order{ + SubmitOrder: types.SubmitOrder{ + ClientOrderID: request.ClientOrderId, + }, + OrderID: orderID, + UUID: uuidOrderID, + }) + + resp := &pb.CancelOrderResponse{} + return resp, nil +} + +func (s *TradingService) QueryOrder(ctx context.Context, request *pb.QueryOrderRequest) (*pb.QueryOrderResponse, error) { + panic("implement me") +} + +func (s *TradingService) QueryOrders(ctx context.Context, request *pb.QueryOrdersRequest) (*pb.QueryOrdersResponse, error) { + panic("implement me") +} + +func (s *TradingService) QueryTrades(ctx context.Context, request *pb.QueryTradesRequest) (*pb.QueryTradesResponse, error) { + panic("implement me") +} + +type UserDataService struct { + Config *bbgo.Config + Environ *bbgo.Environment + Trader *bbgo.Trader + + pb.UnimplementedUserDataServiceServer +} + +func (s *UserDataService) Subscribe(request *pb.UserDataRequest, server pb.UserDataService_SubscribeServer) error { + sessionName := request.Session + + if len(sessionName) == 0 { + return fmt.Errorf("session name can not be empty") + } + + session, ok := s.Environ.Session(sessionName) + if !ok { + return fmt.Errorf("session %s not found", sessionName) + } + + userDataStream := session.Exchange.NewStream() + userDataStream.OnOrderUpdate(func(order types.Order) { + err := server.Send(&pb.UserData{ + Channel: pb.Channel_ORDER, + Event: pb.Event_UPDATE, + Orders: []*pb.Order{transOrder(session, order)}, + }) + if err != nil { + log.WithError(err).Errorf("grpc: can not send user data") + } + }) + userDataStream.OnTradeUpdate(func(trade types.Trade) { + err := server.Send(&pb.UserData{ + Channel: pb.Channel_TRADE, + Event: pb.Event_UPDATE, + Trades: []*pb.Trade{transTrade(session, trade)}, + }) + if err != nil { + log.WithError(err).Errorf("grpc: can not send user data") + } + }) + + balanceHandler := func(balances types.BalanceMap) { + err := server.Send(&pb.UserData{ + Channel: pb.Channel_BALANCE, + Event: pb.Event_UPDATE, + Balances: transBalances(session, balances), + }) + if err != nil { + log.WithError(err).Errorf("grpc: can not send user data") + } + } + userDataStream.OnBalanceUpdate(balanceHandler) + userDataStream.OnBalanceSnapshot(balanceHandler) + + ctx := server.Context() + + balances, err := session.Exchange.QueryAccountBalances(ctx) + if err != nil { + return err + } + + err = server.Send(&pb.UserData{ + Channel: pb.Channel_BALANCE, + Event: pb.Event_SNAPSHOT, + 
Balances: transBalances(session, balances), + }) + if err != nil { + log.WithError(err).Errorf("grpc: can not send user data") + } + + go userDataStream.Connect(ctx) + + defer func() { + if err := userDataStream.Close(); err != nil { + log.WithError(err).Errorf("user data stream close error") + } + }() + + <-ctx.Done() + return nil +} + +type MarketDataService struct { + Config *bbgo.Config + Environ *bbgo.Environment + Trader *bbgo.Trader + + pb.UnimplementedMarketDataServiceServer +} + +func (s *MarketDataService) Subscribe(request *pb.SubscribeRequest, server pb.MarketDataService_SubscribeServer) error { + exchangeSubscriptions := map[string][]types.Subscription{} + for _, sub := range request.Subscriptions { + session, ok := s.Environ.Session(sub.Exchange) + if !ok { + return fmt.Errorf("exchange %s not found", sub.Exchange) + } + + ss, err := toSubscriptions(sub) + if err != nil { + return err + } + + exchangeSubscriptions[session.Name] = append(exchangeSubscriptions[session.Name], ss) + } + + streamPool := map[string]types.Stream{} + for sessionName, subs := range exchangeSubscriptions { + session, ok := s.Environ.Session(sessionName) + if !ok { + log.Errorf("session %s not found", sessionName) + continue + } + + stream := session.Exchange.NewStream() + stream.SetPublicOnly() + for _, sub := range subs { + log.Infof("%s subscribe %s %s %+v", sessionName, sub.Channel, sub.Symbol, sub.Options) + stream.Subscribe(sub.Channel, sub.Symbol, sub.Options) + } + + stream.OnMarketTrade(func(trade types.Trade) { + if err := server.Send(transMarketTrade(session, trade)); err != nil { + log.WithError(err).Error("grpc stream send error") + } + }) + + stream.OnBookSnapshot(func(book types.SliceOrderBook) { + if err := server.Send(transBook(session, book, pb.Event_SNAPSHOT)); err != nil { + log.WithError(err).Error("grpc stream send error") + } + }) + + stream.OnBookUpdate(func(book types.SliceOrderBook) { + if err := server.Send(transBook(session, book, pb.Event_UPDATE)); err != nil { + log.WithError(err).Error("grpc stream send error") + } + }) + stream.OnKLineClosed(func(kline types.KLine) { + err := server.Send(transKLineResponse(session, kline)) + if err != nil { + log.WithError(err).Error("grpc stream send error") + } + }) + streamPool[sessionName] = stream + } + + for _, stream := range streamPool { + go stream.Connect(server.Context()) + } + + defer func() { + for _, stream := range streamPool { + if err := stream.Close(); err != nil { + log.WithError(err).Errorf("market data stream close error") + } + } + }() + + ctx := server.Context() + <-ctx.Done() + return ctx.Err() +} + +func (s *MarketDataService) QueryKLines(ctx context.Context, request *pb.QueryKLinesRequest) (*pb.QueryKLinesResponse, error) { + exchangeName, err := types.ValidExchangeName(request.Exchange) + if err != nil { + return nil, err + } + + for _, session := range s.Environ.Sessions() { + if session.ExchangeName == exchangeName { + response := &pb.QueryKLinesResponse{ + Klines: nil, + Error: nil, + } + + options := types.KLineQueryOptions{ + Limit: int(request.Limit), + } + + endTime := time.Now() + if request.EndTime != 0 { + endTime = time.Unix(request.EndTime, 0) + } + options.EndTime = &endTime + + if request.StartTime != 0 { + startTime := time.Unix(request.StartTime, 0) + options.StartTime = &startTime + } + + klines, err := session.Exchange.QueryKLines(ctx, request.Symbol, types.Interval(request.Interval), options) + if err != nil { + return nil, err + } + + for _, kline := range klines { + response.Klines = 
append(response.Klines, transKLine(session, kline)) + } + + return response, nil + } + } + + return nil, nil +} + +type Server struct { + Config *bbgo.Config + Environ *bbgo.Environment + Trader *bbgo.Trader +} + +func (s *Server) ListenAndServe(bind string) error { + conn, err := net.Listen("tcp", bind) + if err != nil { + return errors.Wrapf(err, "failed to bind network at %s", bind) + } + + var grpcServer = grpc.NewServer() + pb.RegisterMarketDataServiceServer(grpcServer, &MarketDataService{ + Config: s.Config, + Environ: s.Environ, + Trader: s.Trader, + }) + + pb.RegisterTradingServiceServer(grpcServer, &TradingService{ + Config: s.Config, + Environ: s.Environ, + Trader: s.Trader, + }) + + pb.RegisterUserDataServiceServer(grpcServer, &UserDataService{ + Config: s.Config, + Environ: s.Environ, + Trader: s.Trader, + }) + + reflection.Register(grpcServer) + + if err := grpcServer.Serve(conn); err != nil { + return errors.Wrap(err, "failed to serve grpc connections") + } + + return nil +} diff --git a/pkg/indicator/ad.go b/pkg/indicator/ad.go new file mode 100644 index 0000000000..d7263a5ab7 --- /dev/null +++ b/pkg/indicator/ad.go @@ -0,0 +1,79 @@ +package indicator + +import ( + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +/* +ad implements accumulation/distribution indicator + +Accumulation/Distribution Indicator (A/D) +- https://www.investopedia.com/terms/a/accumulationdistribution.asp +*/ +//go:generate callbackgen -type AD +type AD struct { + types.IntervalWindow + Values types.Float64Slice + PrePrice float64 + + EndTime time.Time + UpdateCallbacks []func(value float64) +} + +func (inc *AD) Update(high, low, cloze, volume float64) { + var moneyFlowVolume float64 + if high == low { + moneyFlowVolume = 0 + } else { + moneyFlowVolume = ((2*cloze - high - low) / (high - low)) * volume + } + + ad := inc.Last() + moneyFlowVolume + inc.Values.Push(ad) +} + +func (inc *AD) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *AD) Index(i int) float64 { + length := len(inc.Values) + if length == 0 || length-i-1 < 0 { + return 0 + } + return inc.Values[length-i-1] +} + +func (inc *AD) Length() int { + return len(inc.Values) +} + +var _ types.Series = &AD{} + +func (inc *AD) calculateAndUpdate(kLines []types.KLine) { + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(k.High.Float64(), k.Low.Float64(), k.Close.Float64(), k.Volume.Float64()) + } + + inc.EmitUpdate(inc.Last()) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} +func (inc *AD) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *AD) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/ad_callbacks.go b/pkg/indicator/ad_callbacks.go new file mode 100644 index 0000000000..dd1e9e5bef --- /dev/null +++ b/pkg/indicator/ad_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type AD"; DO NOT EDIT. 
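A minimal usage sketch for the AD indicator introduced above, calling Update directly instead of binding it to a kline stream. The candle numbers are invented for illustration and the import path simply follows this diff's package layout:

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/indicator"
)

func main() {
	// the zero value works for direct updates; IntervalWindow only matters
	// once the indicator is bound to a kline stream
	ad := &indicator.AD{}

	// hypothetical candles: high, low, close, volume
	candles := [][4]float64{
		{10.5, 9.8, 10.2, 1200},
		{10.8, 10.1, 10.7, 1500},
		{10.9, 10.4, 10.5, 900},
	}
	for _, c := range candles {
		// each bar adds ((2*close - high - low) / (high - low)) * volume to the line
		ad.Update(c[0], c[1], c[2], c[3])
	}
	fmt.Printf("A/D = %f\n", ad.Last())
}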
+ +package indicator + +import () + +func (inc *AD) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *AD) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/alma.go b/pkg/indicator/alma.go new file mode 100644 index 0000000000..44e0f18c7f --- /dev/null +++ b/pkg/indicator/alma.go @@ -0,0 +1,95 @@ +package indicator + +import ( + "math" + + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Arnaud Legoux Moving Average +// Refer: https://capital.com/arnaud-legoux-moving-average +// Also check https://github.com/DaveSkender/Stock.Indicators/blob/main/src/a-d/Alma/Alma.cs +// @param offset: Gaussian applied to the combo line. 1->ema, 0->sma +// @param sigma: the standard deviation applied to the combo line. This makes the combo line sharper +//go:generate callbackgen -type ALMA +type ALMA struct { + types.IntervalWindow // required + Offset float64 // required: recommend to be 5 + Sigma int // required: recommend to be 0.5 + Weight []float64 + Sum float64 + input []float64 + Values types.Float64Slice + UpdateCallbacks []func(value float64) +} + +const MaxNumOfALMA = 5_000 +const MaxNumOfALMATruncateSize = 100 + +func (inc *ALMA) Update(value float64) { + if inc.Weight == nil { + inc.Weight = make([]float64, inc.Window) + m := inc.Offset * (float64(inc.Window) - 1.) + s := float64(inc.Window) / float64(inc.Sigma) + inc.Sum = 0. + for i := 0; i < inc.Window; i++ { + diff := float64(i) - m + wt := math.Exp(-diff * diff / 2. / s / s) + inc.Sum += wt + inc.Weight[i] = wt + } + } + inc.input = append(inc.input, value) + if len(inc.input) >= inc.Window { + weightedSum := 0.0 + inc.input = inc.input[len(inc.input)-inc.Window:] + for i := 0; i < inc.Window; i++ { + weightedSum += inc.Weight[inc.Window-i-1] * inc.input[i] + } + inc.Values.Push(weightedSum / inc.Sum) + if len(inc.Values) > MaxNumOfALMA { + inc.Values = inc.Values[MaxNumOfALMATruncateSize-1:] + } + } +} + +func (inc *ALMA) Last() float64 { + if len(inc.Values) == 0 { + return 0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *ALMA) Index(i int) float64 { + if i >= len(inc.Values) { + return 0 + } + return inc.Values[len(inc.Values)-i-1] +} + +func (inc *ALMA) Length() int { + return len(inc.Values) +} + +func (inc *ALMA) calculateAndUpdate(allKLines []types.KLine) { + if inc.input == nil { + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + return + } + inc.Update(allKLines[len(allKLines)-1].Close.Float64()) + inc.EmitUpdate(inc.Last()) +} + +func (inc *ALMA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + inc.calculateAndUpdate(window) +} + +func (inc *ALMA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/alma_callbacks.go b/pkg/indicator/alma_callbacks.go new file mode 100644 index 0000000000..52d2b2f73b --- /dev/null +++ b/pkg/indicator/alma_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type ALMA"; DO NOT EDIT. 
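A similar standalone sketch for the ALMA above. The parameters mirror the accompanying test (Window 5, Offset 0.9, Sigma 6) rather than the recommendations in the struct comments, and the price series is made up:

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	alma := &indicator.ALMA{
		IntervalWindow: types.IntervalWindow{Window: 5},
		Offset:         0.9, // 0..1, trades smoothness against responsiveness
		Sigma:          6,   // controls the width of the Gaussian weighting
	}

	// Last() stays 0 until at least Window values have been pushed
	for _, price := range []float64{10, 11, 12, 11.5, 12.3, 12.8, 12.1} {
		alma.Update(price)
	}
	fmt.Printf("ALMA = %f\n", alma.Last())
}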
+ +package indicator + +import () + +func (inc *ALMA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *ALMA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/alma_test.go b/pkg/indicator/alma_test.go new file mode 100644 index 0000000000..c1dc4a2c18 --- /dev/null +++ b/pkg/indicator/alma_test.go @@ -0,0 +1,61 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +import pandas_ta as ta + +data = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +sigma = 6 +offset = 0.9 +size = 5 + +result = ta.alma(data, size, sigma, offset) +print(result) +*/ +func Test_ALMA(t *testing.T) { + var Delta = 0.01 + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + want float64 + next float64 + all int + }{ + { + name: "random_case", + kLines: buildKLines(input), + want: 5.60785, + next: 4.60785, + all: 26, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + alma := ALMA{ + IntervalWindow: types.IntervalWindow{Window: 5}, + Offset: 0.9, + Sigma: 6, + } + alma.calculateAndUpdate(tt.kLines) + assert.InDelta(t, tt.want, alma.Last(), Delta) + assert.InDelta(t, tt.next, alma.Index(1), Delta) + assert.Equal(t, tt.all, alma.Length()) + }) + } +} diff --git a/pkg/indicator/atr.go b/pkg/indicator/atr.go new file mode 100644 index 0000000000..016eb3a2d4 --- /dev/null +++ b/pkg/indicator/atr.go @@ -0,0 +1,100 @@ +package indicator + +import ( + "math" + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +//go:generate callbackgen -type ATR +type ATR struct { + types.IntervalWindow + PercentageVolatility types.Float64Slice + + PreviousClose float64 + RMA *RMA + + EndTime time.Time + UpdateCallbacks []func(value float64) +} + +func (inc *ATR) Update(high, low, cloze float64) { + if inc.Window <= 0 { + panic("window must be greater than 0") + } + + if inc.RMA == nil { + inc.RMA = &RMA{ + IntervalWindow: types.IntervalWindow{Window: inc.Window}, + Adjust: true, + } + inc.PreviousClose = cloze + return + } + + // calculate true range + trueRange := high - low + hc := math.Abs(high - inc.PreviousClose) + lc := math.Abs(low - inc.PreviousClose) + if trueRange < hc { + trueRange = hc + } + if trueRange < lc { + trueRange = lc + } + + inc.PreviousClose = cloze + + // apply rolling moving average + inc.RMA.Update(trueRange) + atr := inc.RMA.Last() + inc.PercentageVolatility.Push(atr / cloze) +} + +func (inc *ATR) Last() float64 { + if inc.RMA == nil { + return 0 + } + return inc.RMA.Last() +} + +func (inc *ATR) Index(i int) float64 { + if inc.RMA == nil { + return 0 + } + return inc.RMA.Index(i) +} + +func (inc *ATR) Length() int { + if inc.RMA == nil { + return 0 + } + return inc.RMA.Length() +} + +var _ types.Series = &ATR{} + +func (inc *ATR) calculateAndUpdate(kLines []types.KLine) { + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(k.High.Float64(), k.Low.Float64(), k.Close.Float64()) + } + + inc.EmitUpdate(inc.Last()) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} + +func (inc 
*ATR) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *ATR) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/atr_callbacks.go b/pkg/indicator/atr_callbacks.go new file mode 100644 index 0000000000..67952ad71c --- /dev/null +++ b/pkg/indicator/atr_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type ATR"; DO NOT EDIT. + +package indicator + +import () + +func (A *ATR) OnUpdate(cb func(value float64)) { + A.UpdateCallbacks = append(A.UpdateCallbacks, cb) +} + +func (A *ATR) EmitUpdate(value float64) { + for _, cb := range A.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/atr_test.go b/pkg/indicator/atr_test.go new file mode 100644 index 0000000000..bbb562b2a2 --- /dev/null +++ b/pkg/indicator/atr_test.go @@ -0,0 +1,72 @@ +package indicator + +import ( + "encoding/json" + "math" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +/* +python + +import pandas as pd +import pandas_ta as ta + +data = { + "high": [40145.0, 40186.36, 40196.39, 40344.6, 40245.48, 40273.24, 40464.0, 40699.0, 40627.48, 40436.31, 40370.0, 40376.8, 40227.03, 40056.52, 39721.7, 39597.94, 39750.15, 39927.0, 40289.02, 40189.0], + "low": [39870.71, 39834.98, 39866.31, 40108.31, 40016.09, 40094.66, 40105.0, 40196.48, 40154.99, 39800.0, 39959.21, 39922.98, 39940.02, 39632.0, 39261.39, 39254.63, 39473.91, 39555.51, 39819.0, 40006.84], + "close": [40105.78, 39935.23, 40183.97, 40182.03, 40212.26, 40149.99, 40378.0, 40618.37, 40401.03, 39990.39, 40179.13, 40097.23, 40014.72, 39667.85, 39303.1, 39519.99, +39693.79, 39827.96, 40074.94, 40059.84] +} + +high = pd.Series(data['high']) +low = pd.Series(data['low']) +close = pd.Series(data['close']) +result = ta.atr(high, low, close, length=14) +print(result) +*/ +func Test_calculateATR(t *testing.T) { + var bytes = []byte(`{ + "high": [40145.0, 40186.36, 40196.39, 40344.6, 40245.48, 40273.24, 40464.0, 40699.0, 40627.48, 40436.31, 40370.0, 40376.8, 40227.03, 40056.52, 39721.7, 39597.94, 39750.15, 39927.0, 40289.02, 40189.0], + "low": [39870.71, 39834.98, 39866.31, 40108.31, 40016.09, 40094.66, 40105.0, 40196.48, 40154.99, 39800.0, 39959.21, 39922.98, 39940.02, 39632.0, 39261.39, 39254.63, 39473.91, 39555.51, 39819.0, 40006.84], + "close": [40105.78, 39935.23, 40183.97, 40182.03, 40212.26, 40149.99, 40378.0, 40618.37, 40401.03, 39990.39, 40179.13, 40097.23, 40014.72, 39667.85, 39303.1, 39519.99, 39693.79, 39827.96, 40074.94, 40059.84] + }`) + buildKLines := func(bytes []byte) (kLines []types.KLine) { + var prices map[string][]fixedpoint.Value + _ = json.Unmarshal(bytes, &prices) + for i, h := range prices["high"] { + kLine := types.KLine{High: h, Low: prices["low"][i], Close: prices["close"][i]} + kLines = append(kLines, kLine) + } + return kLines + } + + tests := []struct { + name string + kLines []types.KLine + window int + want float64 + }{ + { + name: "test_binance_btcusdt_1h", + kLines: buildKLines(bytes), + window: 14, + want: 367.913903, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + atr := &ATR{IntervalWindow: types.IntervalWindow{Window: tt.window}} + atr.calculateAndUpdate(tt.kLines) + got := atr.Last() + diff := math.Trunc((got-tt.want)*100) / 100 + if diff != 0 { + t.Errorf("calculateATR() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/indicator/boll.go 
b/pkg/indicator/boll.go index 47fe9e2823..70338be04f 100644 --- a/pkg/indicator/boll.go +++ b/pkg/indicator/boll.go @@ -29,16 +29,34 @@ type BOLL struct { // times of Std, generally it's 2 K float64 - SMA Float64Slice - StdDev Float64Slice - UpBand Float64Slice - DownBand Float64Slice + SMA types.Float64Slice + StdDev types.Float64Slice + UpBand types.Float64Slice + DownBand types.Float64Slice EndTime time.Time updateCallbacks []func(sma, upBand, downBand float64) } +type BandType int + +func (inc *BOLL) GetUpBand() types.Series { + return &inc.UpBand +} + +func (inc *BOLL) GetDownBand() types.Series { + return &inc.DownBand +} + +func (inc *BOLL) GetSMA() types.Series { + return &inc.SMA +} + +func (inc *BOLL) GetStdDev() types.Series { + return &inc.StdDev +} + func (inc *BOLL) LastUpBand() float64 { if len(inc.UpBand) == 0 { return 0.0 @@ -64,10 +82,13 @@ func (inc *BOLL) LastStdDev() float64 { } func (inc *BOLL) LastSMA() float64 { - return inc.SMA[len(inc.SMA)-1] + if len(inc.SMA) > 0 { + return inc.SMA[len(inc.SMA)-1] + } + return 0.0 } -func (inc *BOLL) calculateAndUpdate(kLines []types.KLine) { +func (inc *BOLL) Update(kLines []types.KLine) { if len(kLines) < inc.Window { return } @@ -90,7 +111,7 @@ func (inc *BOLL) calculateAndUpdate(kLines []types.KLine) { var prices []float64 for _, k := range recentK { - prices = append(prices, k.Close) + prices = append(prices, k.Close.Float64()) } var std = stat.StdDev(prices, nil) @@ -105,7 +126,7 @@ func (inc *BOLL) calculateAndUpdate(kLines []types.KLine) { inc.DownBand.Push(downBand) // update end time - inc.EndTime = kLines[index].EndTime + inc.EndTime = kLines[index].EndTime.Time() // log.Infof("update boll: sma=%f, up=%f, down=%f", sma, upBand, downBand) @@ -121,7 +142,7 @@ func (inc *BOLL) handleKLineWindowUpdate(interval types.Interval, window types.K return } - inc.calculateAndUpdate(window) + inc.Update(window) } func (inc *BOLL) Bind(updater KLineWindowUpdater) { diff --git a/pkg/indicator/boll_test.go b/pkg/indicator/boll_test.go new file mode 100644 index 0000000000..7e64ee11a2 --- /dev/null +++ b/pkg/indicator/boll_test.go @@ -0,0 +1,68 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import numpy as np +import pandas as pd + +np.random.seed(1) + +window = 14 +n = 100 + +s = pd.Series(10 + np.sin(2 * np.pi * np.arange(n) / n) + np.random.rand(n)) +print(s.tolist()) + +std = s.rolling(window).std() +ma = s.rolling(window).mean() + +boll_up = ma + std +boll_down = ma - std +print(boll_up) +print(boll_down) +*/ +func TestBOLL(t *testing.T) { + var Delta = 4e-2 + var randomPrices = []byte(`[10.417022004702574, 10.783115012971471, 10.12544760838165, 10.489713887217565, 10.395445777981967, 10.401355589143744, 10.55438476406235, 10.77134001860812, 10.878521148332386, 11.074643528982353, 11.006979766695768, 11.322643490145449, 10.888999355660205, 11.607086063812357, 10.797900835973715, 11.47948450455335, 11.261632727869141, 11.434996508489617, 11.045213991061253, 11.12787797497313, 11.75180108497069, 11.936844736848029, 11.295711428887932, 11.684437316983791, 11.87441588072431, 11.894606663503847, 11.08307093979805, 11.03116948454736, 11.152117670293258, 11.846725664558043, 11.049403350128204, 11.350884110893302, 11.862716582616521, 11.40947196501688, 11.536205039452488, 11.12453262538101, 11.457014170457374, 11.563594299318783, 10.70283538327288, 11.387568304693657, 11.576646341198968, 
11.283992449358836, 10.76219766616612, 11.215058620016562, 10.471350559262321, 10.756910520550854, 11.157285390257952, 10.480995462959404, 10.413108572150653, 10.192819091647591, 10.019366957870297, 10.616045013410577, 10.086294882435753, 10.078165344786502, 10.242883272115485, 9.744345550742134, 10.205993052807335, 9.720949283340737, 10.107551862801568, 10.163931565041935, 9.514549176535352, 9.776631998070878, 10.009853051799057, 9.685210642105492, 9.279440216170297, 9.726879411540565, 9.819466719717774, 9.638582432014445, 10.039767703524793, 9.656778554613743, 9.95234539899273, 9.168891543017606, 9.15698909652207, 9.815276587395047, 9.399650108557262, 9.165354197116933, 9.929481851967761, 9.355651158431028, 9.768524852407467, 9.75741482422182, 9.932249574910657, 9.693895721167358, 9.846115381561317, 9.47259166193398, 9.425599966263011, 10.086869223821118, 9.657577947095504, 10.235871419726973, 9.97889439188976, 9.984271730460431, 9.526960720660902, 10.413662463728075, 9.968158459378225, 10.152610322822058, 10.040012250076602, 9.92800998586808, 10.654689633397398, 10.386298172086562, 9.877537093466854, 10.55435439409141]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + window int + k float64 + up float64 + down float64 + }{ + { + name: "random_case", + kLines: buildKLines(input), + window: 14, + k: 1, + up: 10.421434, + down: 9.772696, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + boll := BOLL{IntervalWindow: types.IntervalWindow{Window: tt.window}, K: tt.k} + boll.Update(tt.kLines) + assert.InDelta(t, tt.up, boll.LastUpBand(), Delta) + assert.InDelta(t, tt.down, boll.LastDownBand(), Delta) + }) + } + +} diff --git a/pkg/indicator/ca_callbacks.go b/pkg/indicator/ca_callbacks.go new file mode 100644 index 0000000000..4883dc6a62 --- /dev/null +++ b/pkg/indicator/ca_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type CA"; DO NOT EDIT. 
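A hypothetical sketch against the updated BOLL API above, where Update now takes a []types.KLine and the bands are exposed both through the Last* accessors and as types.Series. Only the close prices are populated, which is assumed to be all the band calculation needs, and the numbers are arbitrary:

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/fixedpoint"
	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	boll := &indicator.BOLL{
		IntervalWindow: types.IntervalWindow{Window: 14},
		K:              2.0, // band width in standard deviations
	}

	// throwaway kline series; only Close is set
	var kLines []types.KLine
	for i := 0; i < 30; i++ {
		price := 100.0 + float64(i%5)
		kLines = append(kLines, types.KLine{Close: fixedpoint.NewFromFloat(price)})
	}

	boll.Update(kLines)
	fmt.Printf("sma=%f up=%f down=%f\n", boll.LastSMA(), boll.LastUpBand(), boll.LastDownBand())
}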
+ +package indicator + +import () + +func (inc *CA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *CA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/cci.go b/pkg/indicator/cci.go new file mode 100644 index 0000000000..9380ad816b --- /dev/null +++ b/pkg/indicator/cci.go @@ -0,0 +1,105 @@ +package indicator + +import ( + "math" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Commodity Channel Index +// Refer URL: http://www.andrewshamlet.net/2017/07/08/python-tutorial-cci +// with modification of ddof=0 to let standard deviation to be divided by N instead of N-1 +//go:generate callbackgen -type CCI +type CCI struct { + types.IntervalWindow + Input types.Float64Slice + TypicalPrice types.Float64Slice + MA types.Float64Slice + Values types.Float64Slice + + UpdateCallbacks []func(value float64) +} + +func (inc *CCI) Update(value float64) { + if len(inc.TypicalPrice) == 0 { + inc.TypicalPrice.Push(value) + inc.Input.Push(value) + return + } else if len(inc.TypicalPrice) > MaxNumOfEWMA { + inc.TypicalPrice = inc.TypicalPrice[MaxNumOfEWMATruncateSize-1:] + inc.Input = inc.Input[MaxNumOfEWMATruncateSize-1:] + } + + inc.Input.Push(value) + tp := inc.TypicalPrice.Last() - inc.Input.Index(inc.Window) + value + inc.TypicalPrice.Push(tp) + if len(inc.Input) < inc.Window { + return + } + ma := tp / float64(inc.Window) + inc.MA.Push(ma) + if len(inc.MA) > MaxNumOfEWMA { + inc.MA = inc.MA[MaxNumOfEWMATruncateSize-1:] + } + md := 0. + for i := 0; i < inc.Window; i++ { + diff := inc.Input.Index(i) - ma + md += diff * diff + } + md = math.Sqrt(md / float64(inc.Window)) + + cci := (value - ma) / (0.015 * md) + + inc.Values.Push(cci) + if len(inc.Values) > MaxNumOfEWMA { + inc.Values = inc.Values[MaxNumOfEWMATruncateSize-1:] + } +} + +func (inc *CCI) Last() float64 { + if len(inc.Values) == 0 { + return 0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *CCI) Index(i int) float64 { + if i >= len(inc.Values) { + return 0 + } + return inc.Values[len(inc.Values)-1-i] +} + +func (inc *CCI) Length() int { + return len(inc.Values) +} + +var _ types.Series = &CCI{} + +var three = fixedpoint.NewFromInt(3) + +func (inc *CCI) calculateAndUpdate(allKLines []types.KLine) { + if inc.TypicalPrice.Length() == 0 { + for _, k := range allKLines { + inc.Update(k.High.Add(k.Low).Add(k.Close).Div(three).Float64()) + inc.EmitUpdate(inc.Last()) + } + } else { + k := allKLines[len(allKLines)-1] + inc.Update(k.High.Add(k.Low).Add(k.Close).Div(three).Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *CCI) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *CCI) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/cci_callbacks.go b/pkg/indicator/cci_callbacks.go new file mode 100644 index 0000000000..52251a1f90 --- /dev/null +++ b/pkg/indicator/cci_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type CCI"; DO NOT EDIT. 
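A short sketch for the CCI above. Update consumes one value per bar; since calculateAndUpdate feeds it the typical price (high+low+close)/3, the same quantity is computed by hand here from invented prices:

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	cci := &indicator.CCI{IntervalWindow: types.IntervalWindow{Window: 16}}

	for i := 0; i < 40; i++ {
		h := 101.0 + float64(i)
		l := 99.0 + float64(i)
		c := 100.0 + float64(i)
		cci.Update((h + l + c) / 3.0) // typical price of the bar
	}
	fmt.Printf("CCI = %f\n", cci.Last())
}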
+ +package indicator + +import () + +func (inc *CCI) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *CCI) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/cci_test.go b/pkg/indicator/cci_test.go new file mode 100644 index 0000000000..4aeca6fcab --- /dev/null +++ b/pkg/indicator/cci_test.go @@ -0,0 +1,37 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +s = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +cci = pd.Series((s - s.rolling(16).mean()) / (0.015 * s.rolling(16).std(ddof=0)), name="CCI") +print(cci) +*/ +func Test_CCI(t *testing.T) { + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []float64 + var Delta = 4.3e-2 + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + t.Run("random_case", func(t *testing.T) { + cci := CCI{IntervalWindow: types.IntervalWindow{Window: 16}} + for _, value := range input { + cci.Update(value) + } + + last := cci.Last() + assert.InDelta(t, 93.250481, last, Delta) + assert.InDelta(t, 81.813449, cci.Index(1), Delta) + assert.Equal(t, 50-16+1, cci.Length()) + }) +} diff --git a/pkg/indicator/cma.go b/pkg/indicator/cma.go new file mode 100644 index 0000000000..8040c87072 --- /dev/null +++ b/pkg/indicator/cma.go @@ -0,0 +1,63 @@ +package indicator + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Cumulative Moving Average, Cumulative Average +// Refer: https://en.wikipedia.org/wiki/Moving_average +//go:generate callbackgen -type CA +type CA struct { + Interval types.Interval + Values types.Float64Slice + length float64 + UpdateCallbacks []func(value float64) +} + +func (inc *CA) Update(x float64) { + newVal := (inc.Values.Last()*inc.length + x) / (inc.length + 1.) 
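+	// newVal is the running cumulative average (length*previous + x) / (length+1);
+	// inc.length tracks the sample count, so unlike a windowed SMA every sample
+	// seen so far keeps contributing to the value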
+ inc.length += 1 + inc.Values.Push(newVal) + if len(inc.Values) > MaxNumOfEWMA { + inc.Values = inc.Values[MaxNumOfEWMATruncateSize-1:] + } +} + +func (inc *CA) Last() float64 { + if len(inc.Values) == 0 { + return 0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *CA) Index(i int) float64 { + if i >= len(inc.Values) { + return 0 + } + return inc.Values[len(inc.Values)-1-i] +} + +func (inc *CA) Length() int { + return len(inc.Values) +} + +var _ types.Series = &CA{} + +func (inc *CA) calculateAndUpdate(allKLines []types.KLine) { + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *CA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *CA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/dema.go b/pkg/indicator/dema.go new file mode 100644 index 0000000000..bc476134a2 --- /dev/null +++ b/pkg/indicator/dema.go @@ -0,0 +1,73 @@ +package indicator + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Double Exponential Moving Average +// Refer URL: https://investopedia.com/terms/d/double-exponential-moving-average.asp + +//go:generate callbackgen -type DEMA +type DEMA struct { + types.IntervalWindow + Values types.Float64Slice + a1 *EWMA + a2 *EWMA + + UpdateCallbacks []func(value float64) +} + +func (inc *DEMA) Update(value float64) { + if len(inc.Values) == 0 { + inc.a1 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.a2 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + } + + inc.a1.Update(value) + inc.a2.Update(inc.a1.Last()) + inc.Values.Push(2*inc.a1.Last() - inc.a2.Last()) + if len(inc.Values) > MaxNumOfEWMA { + inc.Values = inc.Values[MaxNumOfEWMATruncateSize-1:] + } +} + +func (inc *DEMA) Last() float64 { + return inc.Values.Last() +} + +func (inc *DEMA) Index(i int) float64 { + if len(inc.Values)-i-1 >= 0 { + return inc.Values[len(inc.Values)-1-i] + } + return 0 +} + +func (inc *DEMA) Length() int { + return len(inc.Values) +} + +var _ types.Series = &DEMA{} + +func (inc *DEMA) calculateAndUpdate(allKLines []types.KLine) { + if inc.a1 == nil { + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + } else { + inc.Update(allKLines[len(allKLines)-1].Close.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *DEMA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *DEMA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/dema_callbacks.go b/pkg/indicator/dema_callbacks.go new file mode 100644 index 0000000000..e7c4f6676e --- /dev/null +++ b/pkg/indicator/dema_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type DEMA"; DO NOT EDIT. 
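A sketch contrasting the DEMA above with a plain EWMA of the same window (EWMA being the existing indicator in this package that DEMA composes). The step-change prices are invented; the only point is that 2*EMA - EMA(EMA) settles on the new level with less lag than the single EMA:

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	dema := &indicator.DEMA{IntervalWindow: types.IntervalWindow{Window: 16}}
	ewma := &indicator.EWMA{IntervalWindow: types.IntervalWindow{Window: 16}}

	// price jumps from 100 to 110 halfway through the series
	for i := 0; i < 60; i++ {
		price := 100.0
		if i >= 30 {
			price = 110.0
		}
		dema.Update(price)
		ewma.Update(price)
	}
	// DEMA should sit closer to 110 than the single EWMA here
	fmt.Printf("DEMA=%f EWMA=%f\n", dema.Last(), ewma.Last())
}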
+ +package indicator + +import () + +func (inc *DEMA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *DEMA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/dema_test.go b/pkg/indicator/dema_test.go new file mode 100644 index 0000000000..c58429672c --- /dev/null +++ b/pkg/indicator/dema_test.go @@ -0,0 +1,55 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +s = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +ma1 = s.ewm(span=16).mean() +ma2 = ma1.ewm(span=16).mean() +result = (2 * ma1 - ma2) +print(result) +*/ +func Test_DEMA(t *testing.T) { + var Delta = 4e-2 + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + want float64 + next float64 + all int + }{ + { + name: "random_case", + kLines: buildKLines(input), + want: 6.420838, + next: 5.609367, + all: 50, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dema := DEMA{IntervalWindow: types.IntervalWindow{Window: 16}} + dema.calculateAndUpdate(tt.kLines) + last := dema.Last() + assert.InDelta(t, tt.want, last, Delta) + assert.InDelta(t, tt.next, dema.Index(1), Delta) + assert.Equal(t, tt.all, dema.Length()) + }) + } +} diff --git a/pkg/indicator/dmi.go b/pkg/indicator/dmi.go new file mode 100644 index 0000000000..cb0fc71691 --- /dev/null +++ b/pkg/indicator/dmi.go @@ -0,0 +1,113 @@ +package indicator + +import ( + "math" + + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: https://www.investopedia.com/terms/d/dmi.asp +// Refer: https://github.com/twopirllc/pandas-ta/blob/main/pandas_ta/trend/adx.py +// +// Directional Movement Index +// an indicator developed by J. Welles Wilder in 1978 that identifies in which +// direction the price of an asset is moving. +//go:generate callbackgen -type DMI +type DMI struct { + types.IntervalWindow + ADXSmoothing int + atr *ATR + DMP types.UpdatableSeries + DMN types.UpdatableSeries + DIPlus *types.Queue + DIMinus *types.Queue + ADX types.UpdatableSeries + PrevHigh, PrevLow float64 + UpdateCallbacks []func(diplus, diminus, adx float64) +} + +func (inc *DMI) Update(high, low, cloze float64) { + if inc.DMP == nil || inc.DMN == nil { + inc.DMP = &RMA{IntervalWindow: inc.IntervalWindow, Adjust: true} + inc.DMN = &RMA{IntervalWindow: inc.IntervalWindow, Adjust: true} + inc.ADX = &RMA{IntervalWindow: types.IntervalWindow{Window: inc.ADXSmoothing}, Adjust: true} + } + if inc.atr == nil { + inc.atr = &ATR{IntervalWindow: inc.IntervalWindow} + inc.atr.Update(high, low, cloze) + inc.PrevHigh = high + inc.PrevLow = low + inc.DIPlus = types.NewQueue(500) + inc.DIMinus = types.NewQueue(500) + return + } + inc.atr.Update(high, low, cloze) + up := high - inc.PrevHigh + dn := inc.PrevLow - low + inc.PrevHigh = high + inc.PrevLow = low + pos := 0.0 + if up > dn && up > 0. { + pos = up + } + + neg := 0.0 + if dn > up && dn > 0. 
{ + neg = dn + } + + inc.DMP.Update(pos) + inc.DMN.Update(neg) + if inc.atr.Length() < inc.Window { + return + } + k := 100. / inc.atr.Last() + dmp := inc.DMP.Last() + dmn := inc.DMN.Last() + inc.DIPlus.Update(k * dmp) + inc.DIMinus.Update(k * dmn) + dx := 100. * math.Abs(dmp-dmn) / (dmp + dmn) + inc.ADX.Update(dx) + +} + +func (inc *DMI) GetDIPlus() types.Series { + return inc.DIPlus +} + +func (inc *DMI) GetDIMinus() types.Series { + return inc.DIMinus +} + +func (inc *DMI) GetADX() types.Series { + return inc.ADX +} + +func (inc *DMI) Length() int { + return inc.ADX.Length() +} + +func (inc *DMI) calculateAndUpdate(allKLines []types.KLine) { + if inc.ADX == nil { + for _, k := range allKLines { + inc.Update(k.High.Float64(), k.Low.Float64(), k.Close.Float64()) + inc.EmitUpdate(inc.DIPlus.Last(), inc.DIMinus.Last(), inc.ADX.Last()) + } + } else { + k := allKLines[len(allKLines)-1] + inc.Update(k.High.Float64(), k.Low.Float64(), k.Close.Float64()) + inc.EmitUpdate(inc.DIPlus.Last(), inc.DIMinus.Last(), inc.ADX.Last()) + } +} + +func (inc *DMI) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *DMI) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/dmi_callbacks.go b/pkg/indicator/dmi_callbacks.go new file mode 100644 index 0000000000..93e8dd14df --- /dev/null +++ b/pkg/indicator/dmi_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type DMI"; DO NOT EDIT. + +package indicator + +import () + +func (inc *DMI) OnUpdate(cb func(diplus float64, diminus float64, adx float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *DMI) EmitUpdate(diplus float64, diminus float64, adx float64) { + for _, cb := range inc.UpdateCallbacks { + cb(diplus, diminus, adx) + } +} diff --git a/pkg/indicator/dmi_test.go b/pkg/indicator/dmi_test.go new file mode 100644 index 0000000000..62d2b0aa70 --- /dev/null +++ b/pkg/indicator/dmi_test.go @@ -0,0 +1,87 @@ +package indicator + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +import pandas_ta as ta + +data = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) + +high = pd.Series([100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109]) + +low = pd.Series([80,81,82,83,84,85,86,87,88,89,80,81,82,83,84,85,86,87,88,89,80,81,82,83,84,85,86,87,88,89]) + +close = pd.Series([90,91,92,93,94,95,96,97,98,99,90,91,92,93,94,95,96,97,98,99,90,91,92,93,94,95,96,97,98,99]) + +result = ta.adx(high, low, close, 5, 14) +print(result['ADX_14']) + +print(result['DMP_5']) +print(result['DMN_5']) +*/ +func Test_DMI(t *testing.T) { + var Delta = 0.001 + var highb = []byte(`[100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109]`) + var lowb = []byte(`[80,81,82,83,84,85,86,87,88,89,80,81,82,83,84,85,86,87,88,89,80,81,82,83,84,85,86,87,88,89]`) + var clozeb = []byte(`[90,91,92,93,94,95,96,97,98,99,90,91,92,93,94,95,96,97,98,99,90,91,92,93,94,95,96,97,98,99]`) + + buildKLines := func(h, l, c []byte) (klines []types.KLine) { + var hv, cv, lv []fixedpoint.Value + _ = json.Unmarshal(h, &hv) + _ = 
json.Unmarshal(l, &lv) + _ = json.Unmarshal(c, &cv) + if len(hv) != len(lv) || len(lv) != len(cv) { + panic(fmt.Sprintf("length not equal %v %v %v", len(hv), len(lv), len(cv))) + } + for i, hh := range hv { + kline := types.KLine{High: hh, Low: lv[i], Close: cv[i]} + klines = append(klines, kline) + } + return klines + } + + type output struct { + dip float64 + dim float64 + adx float64 + } + + tests := []struct { + name string + klines []types.KLine + want output + next output + total int + }{ + { + name: "test_dmi", + klines: buildKLines(highb, lowb, clozeb), + want: output{dip: 4.85114, dim: 1.339736, adx: 37.857156}, + next: output{dip: 4.813853, dim: 1.67532, adx: 36.111434}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dmi := &DMI{ + IntervalWindow: types.IntervalWindow{Window: 5}, + ADXSmoothing: 14, + } + dmi.calculateAndUpdate(tt.klines) + assert.InDelta(t, dmi.GetDIPlus().Last(), tt.want.dip, Delta) + assert.InDelta(t, dmi.GetDIMinus().Last(), tt.want.dim, Delta) + assert.InDelta(t, dmi.GetADX().Last(), tt.want.adx, Delta) + }) + } + +} diff --git a/pkg/indicator/drift.go b/pkg/indicator/drift.go new file mode 100644 index 0000000000..bda5b51d5f --- /dev/null +++ b/pkg/indicator/drift.go @@ -0,0 +1,91 @@ +package indicator + +import ( + "math" + + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: https://tradingview.com/script/aDymGrFx-Drift-Study-Inspired-by-Monte-Carlo-Simulations-with-BM-KL/ +// Brownian Motion's drift factor +// could be used in Monte Carlo Simulations +//go:generate callbackgen -type Drift +type Drift struct { + types.IntervalWindow + chng *types.Queue + Values types.Float64Slice + SMA *SMA + LastValue float64 + + UpdateCallbacks []func(value float64) +} + +func (inc *Drift) Update(value float64) { + if inc.chng == nil { + inc.SMA = &SMA{IntervalWindow: types.IntervalWindow{Interval: inc.Interval, Window: inc.Window}} + inc.chng = types.NewQueue(inc.Window) + inc.LastValue = value + return + } + var chng float64 + if value == 0 { + chng = 0 + } else { + chng = math.Log(value / inc.LastValue) + inc.LastValue = value + } + inc.SMA.Update(chng) + inc.chng.Update(chng) + if inc.chng.Length() >= inc.Window { + stdev := types.Stdev(inc.chng, inc.Window) + drift := inc.SMA.Last() - stdev*stdev*0.5 + inc.Values.Push(drift) + } +} + +func (inc *Drift) Index(i int) float64 { + if inc.Values == nil { + return 0 + } + return inc.Values.Index(i) +} + +func (inc *Drift) Last() float64 { + if inc.Values.Length() == 0 { + return 0 + } + return inc.Values.Last() +} + +func (inc *Drift) Length() int { + if inc.Values == nil { + return 0 + } + return inc.Values.Length() +} + +var _ types.Series = &Drift{} + +func (inc *Drift) calculateAndUpdate(allKLines []types.KLine) { + if inc.chng == nil { + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + } else { + inc.Update(allKLines[len(allKLines)-1].Close.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *Drift) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *Drift) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/drift_callbacks.go b/pkg/indicator/drift_callbacks.go new file mode 100644 index 0000000000..224ef74a4a --- /dev/null +++ b/pkg/indicator/drift_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type Drift"; DO NOT EDIT. 
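Drift above estimates the Brownian-motion drift term as the window mean of log returns minus half their variance (inc.SMA.Last() - stdev*stdev*0.5). A rough standalone sketch of the same arithmetic, assuming types.Stdev uses the population standard deviation:

package main

import (
	"fmt"
	"math"
)

// drift returns mean(log returns) - variance/2 over the last `window` returns.
func drift(prices []float64, window int) float64 {
	rets := make([]float64, 0, window)
	for i := len(prices) - window; i < len(prices); i++ {
		rets = append(rets, math.Log(prices[i]/prices[i-1]))
	}
	var mean float64
	for _, r := range rets {
		mean += r
	}
	mean /= float64(window)
	var variance float64
	for _, r := range rets {
		variance += (r - mean) * (r - mean)
	}
	variance /= float64(window)
	return mean - 0.5*variance
}

func main() {
	fmt.Println(drift([]float64{100, 101, 103, 102, 104, 107}, 5))
}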
+ +package indicator + +import () + +func (inc *Drift) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *Drift) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/drift_test.go b/pkg/indicator/drift_test.go new file mode 100644 index 0000000000..38d6a732a3 --- /dev/null +++ b/pkg/indicator/drift_test.go @@ -0,0 +1,40 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +func Test_Drift(t *testing.T) { + var randomPrices = []byte(`[1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + all int + }{ + { + name: "random_case", + kLines: buildKLines(input), + all: 47, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + drift := Drift{IntervalWindow: types.IntervalWindow{Window: 3}} + drift.calculateAndUpdate(tt.kLines) + assert.Equal(t, drift.Length(), tt.all) + for _, v := range drift.Values { + assert.LessOrEqual(t, v, 1.0) + } + }) + } +} diff --git a/pkg/indicator/emv.go b/pkg/indicator/emv.go new file mode 100644 index 0000000000..08d439e45b --- /dev/null +++ b/pkg/indicator/emv.go @@ -0,0 +1,88 @@ +package indicator + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Ease of Movement +// Refer URL: https://www.investopedia.com/terms/e/easeofmovement.asp + +//go:generate callbackgen -type EMV +type EMV struct { + types.IntervalWindow + prevH float64 + prevL float64 + Values *SMA + EMVScale float64 + + UpdateCallbacks []func(value float64) +} + +const DefaultEMVScale float64 = 100000000. + +func (inc *EMV) Update(high, low, vol float64) { + if inc.EMVScale == 0 { + inc.EMVScale = DefaultEMVScale + } + if inc.prevH == 0 || inc.Values == nil { + inc.prevH = high + inc.prevL = low + inc.Values = &SMA{IntervalWindow: inc.IntervalWindow} + return + } + distanceMoved := (high+low)/2. - (inc.prevH+inc.prevL)/2. 
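+ // EMV = distance moved / box ratio, where box ratio = (volume / EMVScale) / (high - low);
+ // the raw per-bar value is then smoothed by the SMA held in Values.
+ // Note: a bar with high == low would make the box ratio divide by zero.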
+ boxRatio := vol / inc.EMVScale / (high - low) + result := distanceMoved / boxRatio + inc.prevH = high + inc.prevL = low + inc.Values.Update(result) +} + +func (inc *EMV) Index(i int) float64 { + if inc.Values == nil { + return 0 + } + return inc.Values.Index(i) +} + +func (inc *EMV) Last() float64 { + if inc.Values == nil { + return 0 + } + return inc.Values.Last() +} + +func (inc *EMV) Length() int { + if inc.Values == nil { + return 0 + } + return inc.Values.Length() +} + +var _ types.Series = &EMV{} + +func (inc *EMV) calculateAndUpdate(allKLines []types.KLine) { + if inc.Values == nil { + for _, k := range allKLines { + inc.Update(k.High.Float64(), k.Low.Float64(), k.Volume.Float64()) + if inc.Length() > 0 { + inc.EmitUpdate(inc.Last()) + } + } + } else { + k := allKLines[len(allKLines)-1] + inc.Update(k.High.Float64(), k.Low.Float64(), k.Volume.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *EMV) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + inc.calculateAndUpdate(window) +} + +func (inc *EMV) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/emv_callbacks.go b/pkg/indicator/emv_callbacks.go new file mode 100644 index 0000000000..89afd8a998 --- /dev/null +++ b/pkg/indicator/emv_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type EMV"; DO NOT EDIT. + +package indicator + +import () + +func (inc *EMV) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *EMV) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/emv_test.go b/pkg/indicator/emv_test.go new file mode 100644 index 0000000000..fd9054f5f2 --- /dev/null +++ b/pkg/indicator/emv_test.go @@ -0,0 +1,34 @@ +package indicator + +import ( + "testing" + + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +// data from https://school.stockcharts.com/doku.php?id=technical_indicators:ease_of_movement_emv +func Test_EMV(t *testing.T) { + var Delta = 0.01 + emv := &EMV{ + EMVScale: 100000000, + IntervalWindow: types.IntervalWindow{Window: 14}, + } + emv.Update(63.74, 62.63, 32178836) + emv.Update(64.51, 63.85, 36461672) + assert.InDelta(t, 1.8, emv.Values.Cache.Last(), Delta) + emv.Update(64.57, 63.81, 51372680) + emv.Update(64.31, 62.62, 42476356) + emv.Update(63.43, 62.73, 29504176) + emv.Update(62.85, 61.95, 33098600) + emv.Update(62.70, 62.06, 30577960) + emv.Update(63.18, 62.69, 35693928) + emv.Update(62.47, 61.54, 49768136) + emv.Update(64.16, 63.21, 44759968) + emv.Update(64.38, 63.87, 33425504) + emv.Update(64.89, 64.29, 15895085) + emv.Update(65.25, 64.48, 37015388) + emv.Update(64.69, 63.65, 40672116) + emv.Update(64.26, 63.68, 35627200) + assert.InDelta(t, -0.03, emv.Last(), Delta) +} diff --git a/pkg/indicator/ewma.go b/pkg/indicator/ewma.go index 86b332e37a..d94fb7953e 100644 --- a/pkg/indicator/ewma.go +++ b/pkg/indicator/ewma.go @@ -9,15 +9,33 @@ import ( "github.com/c9s/bbgo/pkg/types" ) +// These numbers should be aligned with bbgo MaxNumOfKLines and MaxNumOfKLinesTruncate +const MaxNumOfEWMA = 5_000 +const MaxNumOfEWMATruncateSize = 100 + //go:generate callbackgen -type EWMA type EWMA struct { types.IntervalWindow - Values Float64Slice + Values types.Float64Slice LastOpenTime time.Time UpdateCallbacks []func(value float64) } +func (inc *EWMA) Update(value float64) { + var multiplier = 2.0 / 
float64(1+inc.Window) + + if len(inc.Values) == 0 { + inc.Values.Push(value) + return + } else if len(inc.Values) > MaxNumOfEWMA { + inc.Values = inc.Values[MaxNumOfEWMATruncateSize-1:] + } + + ema := (1-multiplier)*inc.Last() + multiplier*value + inc.Values.Push(ema) +} + func (inc *EWMA) Last() float64 { if len(inc.Values) == 0 { return 0 @@ -26,6 +44,18 @@ func (inc *EWMA) Last() float64 { return inc.Values[len(inc.Values)-1] } +func (inc *EWMA) Index(i int) float64 { + if i >= len(inc.Values) { + return 0 + } + + return inc.Values[len(inc.Values)-1-i] +} + +func (inc *EWMA) Length() int { + return len(inc.Values) +} + func (inc *EWMA) calculateAndUpdate(allKLines []types.KLine) { if len(allKLines) < inc.Window { // we can't calculate @@ -36,35 +66,40 @@ func (inc *EWMA) calculateAndUpdate(allKLines []types.KLine) { var dataLen = len(allKLines) var multiplier = 2.0 / (float64(inc.Window) + 1) - // init the values from the kline data - var from = 1 + // init the values fromNthK the kline data + var fromNthK = 1 if len(inc.Values) == 0 { // for the first value, we should use the close price inc.Values = []float64{priceF(allKLines[0])} } else { - // from = len(inc.Values) + if len(inc.Values) >= MaxNumOfEWMA { + inc.Values = inc.Values[MaxNumOfEWMATruncateSize-1:] + } + + fromNthK = len(inc.Values) // update ewma with the existing values for i := dataLen - 1; i > 0; i-- { var k = allKLines[i] if k.StartTime.After(inc.LastOpenTime) { - from = i + fromNthK = i } else { break } } } - for i := from; i < dataLen; i++ { + for i := fromNthK; i < dataLen; i++ { var k = allKLines[i] var ewma = priceF(k)*multiplier + (1-multiplier)*inc.Values[i-1] inc.Values.Push(ewma) - inc.LastOpenTime = k.StartTime + inc.LastOpenTime = k.StartTime.Time() inc.EmitUpdate(ewma) } if len(inc.Values) != dataLen { - log.Warnf("%s EMA (%d) value length (%d) != all kline data length (%d)", inc.Interval, inc.Window, len(inc.Values), dataLen) + // check error + log.Warnf("%s EMA (%d) value length (%d) != kline window length (%d)", inc.Interval, inc.Window, len(inc.Values), dataLen) } v1 := math.Floor(inc.Values[len(inc.Values)-1]*100.0) / 100.0 @@ -89,28 +124,6 @@ func ewma(prices []float64, multiplier float64) float64 { return prices[end]*multiplier + (1-multiplier)*ewma(prices[:end], multiplier) } -type KLinePriceMapper func(k types.KLine) float64 - -func KLineOpenPriceMapper(k types.KLine) float64 { - return k.Open -} - -func KLineClosePriceMapper(k types.KLine) float64 { - return k.Close -} - -func MapKLinePrice(kLines []types.KLine, f KLinePriceMapper) (prices []float64) { - for _, k := range kLines { - prices = append(prices, f(k)) - } - - return prices -} - -type KLineWindowUpdater interface { - OnKLineWindowUpdate(func(interval types.Interval, window types.KLineWindow)) -} - func (inc *EWMA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { if inc.Interval != interval { return @@ -122,3 +135,5 @@ func (inc *EWMA) handleKLineWindowUpdate(interval types.Interval, window types.K func (inc *EWMA) Bind(updater KLineWindowUpdater) { updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) } + +var _ types.Series = &EWMA{} diff --git a/pkg/indicator/ewma_test.go b/pkg/indicator/ewma_test.go index d752b5f0c6..f781f251ca 100644 --- a/pkg/indicator/ewma_test.go +++ b/pkg/indicator/ewma_test.go @@ -1,9 +1,11 @@ package indicator import ( + "encoding/json" "math" "testing" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) @@ -11,7 +13,7 @@ import ( // 2020/12/05 10:25 // curl 
-s 'https://www.binance.com/api/v3/klines?symbol=ETHUSDT&interval=5m&endTime=1607135400000&limit=1000' | jq '. | map({ closePrice: (.[4] | tonumber), openTime: .[0] })' // curl -s 'https://www.binance.com/api/v3/klines?symbol=ETHUSDT&interval=5m&endTime=1607135400000&limit=1000' | jq '. | map(.[4] | tonumber)' -var ethusdt5m = []float64{ +var ethusdt5m = []byte(`[ 614.36, 613.62, 611.68, @@ -1011,10 +1013,10 @@ var ethusdt5m = []float64{ 572.85, 572.21, 572.63, - 572.74, -} + 572.74 +]`) -func buildKLines(prices []float64) (klines []types.KLine) { +func buildKLines(prices []fixedpoint.Value) (klines []types.KLine) { for _, p := range prices { klines = append(klines, types.KLine{Close: p}) } @@ -1028,6 +1030,10 @@ func Test_calculateEWMA(t *testing.T) { priceF KLinePriceMapper window int } + var input []fixedpoint.Value + if err := json.Unmarshal(ethusdt5m, &input); err != nil { + panic(err) + } tests := []struct { name string args args @@ -1036,7 +1042,7 @@ func Test_calculateEWMA(t *testing.T) { { name: "ETHUSDT EMA 7", args: args{ - allKLines: buildKLines(ethusdt5m), + allKLines: buildKLines(input), priceF: KLineClosePriceMapper, window: 7, }, @@ -1045,7 +1051,7 @@ func Test_calculateEWMA(t *testing.T) { { name: "ETHUSDT EMA 25", args: args{ - allKLines: buildKLines(ethusdt5m), + allKLines: buildKLines(input), priceF: KLineClosePriceMapper, window: 25, }, @@ -1054,7 +1060,7 @@ func Test_calculateEWMA(t *testing.T) { { name: "ETHUSDT EMA 99", args: args{ - allKLines: buildKLines(ethusdt5m), + allKLines: buildKLines(input), priceF: KLineClosePriceMapper, window: 99, }, diff --git a/pkg/indicator/hull.go b/pkg/indicator/hull.go new file mode 100644 index 0000000000..0c8347f9b5 --- /dev/null +++ b/pkg/indicator/hull.go @@ -0,0 +1,82 @@ +package indicator + +import ( + "math" + + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Hull Moving Average +// Refer URL: https://fidelity.com/learning-center/trading-investing/technical-analysis/technical-indicator-guide/hull-moving-average +//go:generate callbackgen -type HULL +type HULL struct { + types.IntervalWindow + ma1 *EWMA + ma2 *EWMA + result *EWMA + + UpdateCallbacks []func(value float64) +} + +func (inc *HULL) Update(value float64) { + if inc.result == nil { + inc.ma1 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window / 2}} + inc.ma2 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.result = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, int(math.Sqrt(float64(inc.Window)))}} + } + inc.ma1.Update(value) + inc.ma2.Update(value) + inc.result.Update(2*inc.ma1.Last() - inc.ma2.Last()) +} + +func (inc *HULL) Last() float64 { + if inc.result == nil { + return 0 + } + return inc.result.Last() +} + +func (inc *HULL) Index(i int) float64 { + if inc.result == nil { + return 0 + } + return inc.result.Index(i) +} + +func (inc *HULL) Length() int { + if inc.result == nil { + return 0 + } + return inc.result.Length() +} + +var _ types.Series = &HULL{} + +// TODO: should we just ignore the possible overlapping? 
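+// Note: the textbook Hull MA is WMA(2*WMA(price, n/2) - WMA(price, n), sqrt(n));
+// this implementation substitutes EWMAs for the weighted moving averages, which is
+// what the pandas ewm cross-check in hull_test.go reproduces.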
+func (inc *HULL) calculateAndUpdate(allKLines []types.KLine) { + doable := false + if inc.ma1 == nil || inc.ma1.Length() == 0 { + doable = true + } + for _, k := range allKLines { + if !doable && k.StartTime.After(inc.ma1.LastOpenTime) { + doable = true + } + if doable { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + } +} + +func (inc *HULL) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *HULL) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/hull_callbacks.go b/pkg/indicator/hull_callbacks.go new file mode 100644 index 0000000000..aa95c8dd96 --- /dev/null +++ b/pkg/indicator/hull_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type HULL"; DO NOT EDIT. + +package indicator + +import () + +func (inc *HULL) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *HULL) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/hull_test.go b/pkg/indicator/hull_test.go new file mode 100644 index 0000000000..95f883cd8b --- /dev/null +++ b/pkg/indicator/hull_test.go @@ -0,0 +1,55 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +s = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +ma1 = s.ewm(span=8).mean() +ma2 = s.ewm(span=16).mean() +result = (2 * ma1 - ma2).ewm(span=4).mean() +print(result) +*/ +func Test_HULL(t *testing.T) { + var Delta = 1.5e-2 + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + want float64 + next float64 + all int + }{ + { + name: "random_case", + kLines: buildKLines(input), + want: 6.002935, + next: 5.167056, + all: 50, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + hull := HULL{IntervalWindow: types.IntervalWindow{Window: 16}} + hull.calculateAndUpdate(tt.kLines) + last := hull.Last() + assert.InDelta(t, tt.want, last, Delta) + assert.InDelta(t, tt.next, hull.Index(1), Delta) + assert.Equal(t, tt.all, hull.Length()) + }) + } +} diff --git a/pkg/indicator/line.go b/pkg/indicator/line.go new file mode 100644 index 0000000000..763d58f89e --- /dev/null +++ b/pkg/indicator/line.go @@ -0,0 +1,76 @@ +package indicator + +import ( + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +// Line indicator is a utility that helps to simulate either the +// 1. trend +// 2. support +// 3. 
resistance +// of the market data, defined with series interface +type Line struct { + types.IntervalWindow + start float64 + end float64 + startIndex int + endIndex int + currentTime time.Time + Interval types.Interval +} + +func (l *Line) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if interval != l.Interval { + return + } + newTime := window.Last().EndTime.Time() + delta := int(newTime.Sub(l.currentTime).Minutes()) / l.Interval.Minutes() + l.startIndex += delta + l.endIndex += delta + l.currentTime = newTime +} + +func (l *Line) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(l.handleKLineWindowUpdate) +} + +func (l *Line) Last() float64 { + return (l.end-l.start)/float64(l.startIndex-l.endIndex)*float64(l.endIndex) + l.end +} + +func (l *Line) Index(i int) float64 { + return (l.end-l.start)/float64(l.startIndex-l.endIndex)*float64(l.endIndex-i) + l.end +} + +func (l *Line) Length() int { + if l.startIndex > l.endIndex { + return l.startIndex - l.endIndex + } else { + return l.endIndex - l.startIndex + } +} + +func (l *Line) SetXY1(index int, value float64) { + l.startIndex = index + l.start = value +} + +func (l *Line) SetXY2(index int, value float64) { + l.endIndex = index + l.end = value +} + +func NewLine(startIndex int, startValue float64, endIndex int, endValue float64, interval types.Interval) *Line { + return &Line{ + start: startValue, + end: endValue, + startIndex: startIndex, + endIndex: endIndex, + currentTime: time.Time{}, + Interval: interval, + } +} + +var _ types.Series = &Line{} diff --git a/pkg/indicator/macd.go b/pkg/indicator/macd.go new file mode 100644 index 0000000000..3dfbd6d450 --- /dev/null +++ b/pkg/indicator/macd.go @@ -0,0 +1,118 @@ +package indicator + +import ( + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +/* +macd implements moving average convergence divergence indicator + +Moving Average Convergence Divergence (MACD) +- https://www.investopedia.com/terms/m/macd.asp +*/ + +//go:generate callbackgen -type MACD +type MACD struct { + types.IntervalWindow // 9 + ShortPeriod int // 12 + LongPeriod int // 26 + Values types.Float64Slice + FastEWMA EWMA + SlowEWMA EWMA + SignalLine EWMA + Histogram types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(value float64) +} + +func (inc *MACD) Update(x float64) { + if len(inc.Values) == 0 { + inc.FastEWMA = EWMA{IntervalWindow: types.IntervalWindow{Window: inc.ShortPeriod}} + inc.SlowEWMA = EWMA{IntervalWindow: types.IntervalWindow{Window: inc.LongPeriod}} + inc.SignalLine = EWMA{IntervalWindow: types.IntervalWindow{Window: inc.Window}} + } + + // update fast and slow ema + inc.FastEWMA.Update(x) + inc.SlowEWMA.Update(x) + + // update macd + macd := inc.FastEWMA.Last() - inc.SlowEWMA.Last() + inc.Values.Push(macd) + + // update signal line + inc.SignalLine.Update(macd) + + // update histogram + inc.Histogram.Push(macd - inc.SignalLine.Last()) +} + +func (inc *MACD) calculateMACD(kLines []types.KLine, priceF KLinePriceMapper) float64 { + for _, kline := range kLines { + inc.Update(kline.Close.Float64()) + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *MACD) calculateAndUpdate(kLines []types.KLine) { + if len(kLines) == 0 { + return + } + + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(k.Close.Float64()) + } + + inc.EmitUpdate(inc.Values[len(inc.Values)-1]) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} + +func (inc *MACD) handleKLineWindowUpdate(interval 
types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *MACD) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +type MACDValues struct { + *MACD +} + +func (inc *MACDValues) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *MACDValues) Index(i int) float64 { + length := len(inc.Values) + if length == 0 || length-1-i < 0 { + return 0.0 + } + return inc.Values[length-1+i] +} + +func (inc *MACDValues) Length() int { + return len(inc.Values) +} + +func (inc *MACD) MACD() types.Series { + return &MACDValues{inc} +} + +func (inc *MACD) Singals() types.Series { + return &inc.SignalLine +} diff --git a/pkg/indicator/macd_callbacks.go b/pkg/indicator/macd_callbacks.go new file mode 100644 index 0000000000..a368fa625d --- /dev/null +++ b/pkg/indicator/macd_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type MACD"; DO NOT EDIT. + +package indicator + +import () + +func (inc *MACD) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *MACD) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/macd_test.go b/pkg/indicator/macd_test.go new file mode 100644 index 0000000000..6cf074fff6 --- /dev/null +++ b/pkg/indicator/macd_test.go @@ -0,0 +1,52 @@ +package indicator + +import ( + "encoding/json" + "math" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +/* +python: + +import pandas as pd +s = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +slow = s.ewm(span=26, adjust=False).mean() +fast = s.ewm(span=12, adjust=False).mean() +print(fast - slow) +*/ + +func Test_calculateMACD(t *testing.T) { + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + want float64 + }{ + { + name: "random_case", + kLines: buildKLines(input), + want: 0.7967670223776384, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + iw := types.IntervalWindow{Window: 9} + macd := MACD{IntervalWindow: iw, ShortPeriod: 12, LongPeriod: 26} + priceF := KLineClosePriceMapper + got := macd.calculateMACD(tt.kLines, priceF) + diff := math.Trunc((got-tt.want)*100) / 100 + if diff != 0 { + t.Errorf("calculateMACD() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/indicator/obv.go b/pkg/indicator/obv.go new file mode 100644 index 0000000000..3ea11772da --- /dev/null +++ b/pkg/indicator/obv.go @@ -0,0 +1,67 @@ +package indicator + +import ( + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +/* +obv implements on-balance volume indicator + +On-Balance Volume (OBV) Definition +- https://www.investopedia.com/terms/o/onbalancevolume.asp +*/ +//go:generate callbackgen -type OBV +type OBV struct { + types.IntervalWindow + Values types.Float64Slice + PrePrice float64 + + EndTime time.Time + UpdateCallbacks []func(value float64) +} + +func (inc *OBV) Update(price, volume float64) { + if len(inc.Values) == 0 { + inc.PrePrice = price + inc.Values.Push(volume) + return + } + + if volume 
< inc.PrePrice { + inc.Values.Push(inc.Last() - volume) + } else { + inc.Values.Push(inc.Last() + volume) + } +} + +func (inc *OBV) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *OBV) calculateAndUpdate(kLines []types.KLine) { + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(k.Close.Float64(), k.Volume.Float64()) + } + inc.EmitUpdate(inc.Last()) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} + +func (inc *OBV) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *OBV) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/obv_callbacks.go b/pkg/indicator/obv_callbacks.go new file mode 100644 index 0000000000..b0897152c8 --- /dev/null +++ b/pkg/indicator/obv_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type OBV"; DO NOT EDIT. + +package indicator + +import () + +func (inc *OBV) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *OBV) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/obv_test.go b/pkg/indicator/obv_test.go new file mode 100644 index 0000000000..66d951a29d --- /dev/null +++ b/pkg/indicator/obv_test.go @@ -0,0 +1,61 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +const Delta = 1e-9 + +func Test_calculateOBV(t *testing.T) { + buildKLines := func(prices, volumes []fixedpoint.Value) (kLines []types.KLine) { + for i, p := range prices { + kLines = append(kLines, types.KLine{High: p, Low: p, Close: p, Volume: volumes[i]}) + } + return kLines + } + var easy1 = []byte(`[3, 2, 1, 4]`) + var easy2 = []byte(`[3, 2, 2, 6]`) + var input1 []fixedpoint.Value + var input2 []fixedpoint.Value + _ = json.Unmarshal(easy1, &input1) + _ = json.Unmarshal(easy2, &input2) + + tests := []struct { + name string + kLines []types.KLine + window int + want types.Float64Slice + }{ + { + name: "trivial_case", + kLines: buildKLines( + []fixedpoint.Value{fixedpoint.Zero}, []fixedpoint.Value{fixedpoint.One}, + ), + window: 0, + want: types.Float64Slice{1.0}, + }, + { + name: "easy_case", + kLines: buildKLines(input1, input2), + window: 0, + want: types.Float64Slice{3, 1, -1, 5}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + obv := OBV{IntervalWindow: types.IntervalWindow{Window: tt.window}} + obv.calculateAndUpdate(tt.kLines) + assert.Equal(t, len(obv.Values), len(tt.want)) + for i, v := range obv.Values { + assert.InDelta(t, v, tt.want[i], Delta) + } + }) + } +} diff --git a/pkg/indicator/pivot.go b/pkg/indicator/pivot.go new file mode 100644 index 0000000000..ccc1322e20 --- /dev/null +++ b/pkg/indicator/pivot.go @@ -0,0 +1,126 @@ +package indicator + +import ( + "fmt" + "time" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/types" +) + +type KLineValueMapper func(k types.KLine) float64 + +//go:generate callbackgen -type Pivot +type Pivot struct { + types.IntervalWindow + + // Values + Lows types.Float64Slice // higher low + Highs types.Float64Slice // lower high + + EndTime time.Time + + updateCallbacks []func(valueLow, valueHigh float64) +} + 
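+// Lows and Highs only grow when a pivot is actually confirmed, so LastLow and
+// LastHigh return the most recently detected pivot (or 0 when none has been seen
+// yet) rather than one value per incoming kline.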
+func (inc *Pivot) LastLow() float64 { + if len(inc.Lows) == 0 { + return 0.0 + } + return inc.Lows[len(inc.Lows)-1] +} + +func (inc *Pivot) LastHigh() float64 { + if len(inc.Highs) == 0 { + return 0.0 + } + return inc.Highs[len(inc.Highs)-1] +} + +func (inc *Pivot) Update(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + // skip old data + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + recentT := klines[end-(inc.Window-1) : end+1] + + l, h, err := calculatePivot(recentT, inc.Window, KLineLowPriceMapper, KLineHighPriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate pivots") + return + } + + if l > 0.0 { + inc.Lows.Push(l) + } + if h > 0.0 { + inc.Highs.Push(h) + } + + if len(inc.Lows) > MaxNumOfVOL { + inc.Lows = inc.Lows[MaxNumOfVOLTruncateSize-1:] + } + if len(inc.Highs) > MaxNumOfVOL { + inc.Highs = inc.Highs[MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(l, h) + +} + +func (inc *Pivot) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.Update(window) +} + +func (inc *Pivot) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculatePivot(klines []types.KLine, window int, valLow KLineValueMapper, valHigh KLineValueMapper) (float64, float64, error) { + length := len(klines) + if length == 0 || length < window { + return 0., 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + + var lows types.Float64Slice + var highs types.Float64Slice + for _, k := range klines { + lows.Push(valLow(k)) + highs.Push(valHigh(k)) + } + + pl := 0. + if lows.Min() == lows.Index(int(window/2.)-1) { + pl = lows.Min() + } + + ph := 0. + if highs.Max() == highs.Index(int(window/2.)-1) { + ph = highs.Max() + } + + return pl, ph, nil +} + +func KLineLowPriceMapper(k types.KLine) float64 { + return k.Low.Float64() +} + +func KLineHighPriceMapper(k types.KLine) float64 { + return k.High.Float64() +} diff --git a/pkg/indicator/pivot_callbacks.go b/pkg/indicator/pivot_callbacks.go new file mode 100644 index 0000000000..4c3a90ccf0 --- /dev/null +++ b/pkg/indicator/pivot_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type Pivot"; DO NOT EDIT. 
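calculatePivot above reports a pivot low when the lowest low of the rolling window sits int(window/2)-1 bars back from the newest bar, and a pivot high symmetrically for the highest high. A small hand-driven sketch using the import paths from this diff (the prices are made up; in a strategy the indicator is bound to a kline stream instead):

package main

import (
	"fmt"
	"time"

	"github.com/c9s/bbgo/pkg/fixedpoint"
	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	pivot := &indicator.Pivot{IntervalWindow: types.IntervalWindow{Interval: types.Interval5m, Window: 5}}
	pivot.OnUpdate(func(low, high float64) {
		fmt.Printf("pivot low=%f high=%f\n", low, high) // 0 means "no pivot on this side"
	})

	lows := []float64{5, 4, 3, 2, 4} // the minimum sits one bar back from the newest, so a pivot low fires
	var klines []types.KLine
	for i, l := range lows {
		klines = append(klines, types.KLine{
			High:    fixedpoint.NewFromFloat(l + 10),
			Low:     fixedpoint.NewFromFloat(l),
			Close:   fixedpoint.NewFromFloat(l + 5),
			EndTime: types.Time(time.Now().Add(time.Duration(i) * time.Minute)),
		})
	}
	pivot.Update(klines)
	fmt.Println("last pivot low:", pivot.LastLow())
}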
+ +package indicator + +import () + +func (inc *Pivot) OnUpdate(cb func(valueLow float64, valueHigh float64)) { + inc.updateCallbacks = append(inc.updateCallbacks, cb) +} + +func (inc *Pivot) EmitUpdate(valueLow float64, valueHigh float64) { + for _, cb := range inc.updateCallbacks { + cb(valueLow, valueHigh) + } +} diff --git a/pkg/indicator/rma.go b/pkg/indicator/rma.go new file mode 100644 index 0000000000..8fee7a1283 --- /dev/null +++ b/pkg/indicator/rma.go @@ -0,0 +1,86 @@ +package indicator + +import ( + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +// Running Moving Average +// Refer: https://github.com/twopirllc/pandas-ta/blob/main/pandas_ta/overlap/rma.py#L5 +// Refer: https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.ewm.html#pandas-dataframe-ewm +//go:generate callbackgen -type RMA +type RMA struct { + types.IntervalWindow + Values types.Float64Slice + counter int + Adjust bool + tmp float64 + sum float64 + EndTime time.Time + UpdateCallbacks []func(value float64) +} + +func (inc *RMA) Update(x float64) { + lambda := 1 / float64(inc.Window) + if inc.counter == 0 { + inc.sum = 1 + inc.tmp = x + } else { + if inc.Adjust { + inc.sum = inc.sum*(1-lambda) + 1 + inc.tmp = inc.tmp + (x-inc.tmp)/inc.sum + } else { + inc.tmp = inc.tmp*(1-lambda) + x*lambda + } + } + inc.counter++ + + if inc.counter < inc.Window { + inc.Values.Push(0) + return + } + + inc.Values.Push(inc.tmp) +} + +func (inc *RMA) Last() float64 { + return inc.Values.Last() +} + +func (inc *RMA) Index(i int) float64 { + length := len(inc.Values) + if length == 0 || length-i-1 < 0 { + return 0 + } + return inc.Values[length-i-1] +} + +func (inc *RMA) Length() int { + return len(inc.Values) +} + +var _ types.Series = &RMA{} + +func (inc *RMA) calculateAndUpdate(kLines []types.KLine) { + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(k.Close.Float64()) + } + + inc.EmitUpdate(inc.Last()) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} +func (inc *RMA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *RMA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/rma_callbacks.go b/pkg/indicator/rma_callbacks.go new file mode 100644 index 0000000000..f5a40ca5ea --- /dev/null +++ b/pkg/indicator/rma_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type RMA"; DO NOT EDIT. 
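RMA above is Wilder's smoothing with alpha = 1/window; with Adjust set it reproduces the pandas ewm(adjust=True) weighting, otherwise it reduces to the plain recurrence sketched below (the indicator additionally pushes 0 until it has seen a full window). A minimal sketch of the unadjusted form:

package main

import "fmt"

// rma applies Wilder's smoothing (alpha = 1/window) across the whole series
// and returns the final value.
func rma(xs []float64, window int) float64 {
	alpha := 1.0 / float64(window)
	val := xs[0]
	for _, x := range xs[1:] {
		val = (1-alpha)*val + alpha*x
	}
	return val
}

func main() {
	fmt.Println(rma([]float64{1, 2, 3, 4, 5, 6}, 3))
}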
+ +package indicator + +import () + +func (inc *RMA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *RMA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/rsi.go b/pkg/indicator/rsi.go new file mode 100644 index 0000000000..b9eabd6f48 --- /dev/null +++ b/pkg/indicator/rsi.go @@ -0,0 +1,101 @@ +package indicator + +import ( + "math" + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +/* +rsi implements Relative Strength Index (RSI) + +https://www.investopedia.com/terms/r/rsi.asp +*/ +//go:generate callbackgen -type RSI +type RSI struct { + types.IntervalWindow + Values types.Float64Slice + Prices types.Float64Slice + PreviousAvgLoss float64 + PreviousAvgGain float64 + + EndTime time.Time + UpdateCallbacks []func(value float64) +} + +func (inc *RSI) Update(price float64) { + inc.Prices.Push(price) + + if len(inc.Prices) < inc.Window+1 { + return + } + + var avgGain float64 + var avgLoss float64 + if len(inc.Prices) == inc.Window+1 { + priceDifferences := inc.Prices.Diff() + + avgGain = priceDifferences.PositiveValuesOrZero().Abs().Sum() / float64(inc.Window) + avgLoss = priceDifferences.NegativeValuesOrZero().Abs().Sum() / float64(inc.Window) + } else { + difference := price - inc.Prices[len(inc.Prices)-2] + currentGain := math.Max(difference, 0) + currentLoss := -math.Min(difference, 0) + + avgGain = (inc.PreviousAvgGain*13 + currentGain) / float64(inc.Window) + avgLoss = (inc.PreviousAvgLoss*13 + currentLoss) / float64(inc.Window) + } + + rs := avgGain / avgLoss + rsi := 100 - (100 / (1 + rs)) + inc.Values.Push(rsi) + + inc.PreviousAvgGain = avgGain + inc.PreviousAvgLoss = avgLoss +} + +func (inc *RSI) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *RSI) Index(i int) float64 { + length := len(inc.Values) + if length <= 0 || length-i-1 < 0 { + return 0.0 + } + return inc.Values[length-i-1] +} + +func (inc *RSI) Length() int { + return len(inc.Values) +} + +var _ types.Series = &RSI{} + +func (inc *RSI) calculateAndUpdate(kLines []types.KLine) { + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(k.Close.Float64()) + } + + inc.EmitUpdate(inc.Last()) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} + +func (inc *RSI) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *RSI) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/rsi_callbacks.go b/pkg/indicator/rsi_callbacks.go new file mode 100644 index 0000000000..2c1a11f661 --- /dev/null +++ b/pkg/indicator/rsi_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type RSI"; DO NOT EDIT. 
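RSI above seeds its average gain/loss from the first full window of price changes and then applies Wilder smoothing; the generic update is avg = (prevAvg*(window-1) + current)/window, so the hard-coded 13 in the code corresponds to window = 14. A self-contained sketch of the same calculation:

package main

import (
	"fmt"
	"math"
)

// rsi computes Wilder's RSI with period n over the whole price series.
func rsi(prices []float64, n int) float64 {
	var avgGain, avgLoss float64
	// seed the averages with the first n price changes
	for i := 1; i <= n; i++ {
		d := prices[i] - prices[i-1]
		avgGain += math.Max(d, 0)
		avgLoss += -math.Min(d, 0)
	}
	avgGain /= float64(n)
	avgLoss /= float64(n)
	// Wilder smoothing for the remaining changes
	for i := n + 1; i < len(prices); i++ {
		d := prices[i] - prices[i-1]
		avgGain = (avgGain*float64(n-1) + math.Max(d, 0)) / float64(n)
		avgLoss = (avgLoss*float64(n-1) - math.Min(d, 0)) / float64(n)
	}
	return 100 - 100/(1+avgGain/avgLoss)
}

func main() {
	prices := []float64{44.34, 44.09, 44.15, 43.61, 44.33, 44.83, 45.10, 45.42,
		45.84, 46.08, 45.89, 46.03, 45.61, 46.28, 46.28}
	fmt.Println(rsi(prices, 14)) // ≈ 70.46, the StockCharts sample also used in rsi_test.go
}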
+ +package indicator + +import () + +func (inc *RSI) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *RSI) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/rsi_test.go b/pkg/indicator/rsi_test.go new file mode 100644 index 0000000000..80e4c91870 --- /dev/null +++ b/pkg/indicator/rsi_test.go @@ -0,0 +1,69 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func Test_calculateRSI(t *testing.T) { + // test case from https://school.stockcharts.com/doku.php?id=technical_indicators:relative_strength_index_rsi + buildKLines := func(prices []fixedpoint.Value) (kLines []types.KLine) { + for _, p := range prices { + kLines = append(kLines, types.KLine{High: p, Low: p, Close: p}) + } + return kLines + } + var data = []byte(`[44.34, 44.09, 44.15, 43.61, 44.33, 44.83, 45.10, 45.42, 45.84, 46.08, 45.89, 46.03, 45.61, 46.28, 46.28, 46.00, 46.03, 46.41, 46.22, 45.64, 46.21, 46.25, 45.71, 46.45, 45.78, 45.35, 44.03, 44.18, 44.22, 44.57, 43.42, 42.66, 43.13]`) + var values []fixedpoint.Value + _ = json.Unmarshal(data, &values) + + tests := []struct { + name string + kLines []types.KLine + window int + want types.Float64Slice + }{ + { + name: "RSI", + kLines: buildKLines(values), + window: 14, + want: types.Float64Slice{ + 70.46413502109704, + 66.24961855355505, + 66.48094183471265, + 69.34685316290864, + 66.29471265892624, + 57.91502067008556, + 62.88071830996241, + 63.208788718287764, + 56.01158478954758, + 62.33992931089789, + 54.67097137765515, + 50.386815195114224, + 40.01942379131357, + 41.49263540422282, + 41.902429678458105, + 45.499497238680405, + 37.32277831337995, + 33.090482572723396, + 37.78877198205783, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + rsi := RSI{IntervalWindow: types.IntervalWindow{Window: tt.window}} + rsi.calculateAndUpdate(tt.kLines) + assert.Equal(t, len(rsi.Values), len(tt.want)) + for i, v := range rsi.Values { + assert.InDelta(t, v, tt.want[i], Delta) + } + }) + } +} diff --git a/pkg/indicator/sma.go b/pkg/indicator/sma.go index a44c6e27e4..d500c5d6fa 100644 --- a/pkg/indicator/sma.go +++ b/pkg/indicator/sma.go @@ -9,27 +9,57 @@ import ( "github.com/c9s/bbgo/pkg/types" ) -type Float64Slice []float64 - -func (s *Float64Slice) Push(v float64) { - *s = append(*s, v) -} +const MaxNumOfSMA = 5_000 +const MaxNumOfSMATruncateSize = 100 var zeroTime time.Time //go:generate callbackgen -type SMA type SMA struct { types.IntervalWindow - Values Float64Slice + Values types.Float64Slice + Cache types.Float64Slice EndTime time.Time UpdateCallbacks []func(value float64) } func (inc *SMA) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } return inc.Values[len(inc.Values)-1] } +func (inc *SMA) Index(i int) float64 { + length := len(inc.Values) + if length == 0 || length-i-1 < 0 { + return 0.0 + } + + return inc.Values[length-i-1] +} + +func (inc *SMA) Length() int { + return len(inc.Values) +} + +var _ types.Series = &SMA{} + +func (inc *SMA) Update(value float64) { + if len(inc.Cache) < inc.Window { + inc.Cache = append(inc.Cache, value) + if len(inc.Cache) == inc.Window { + inc.Values = append(inc.Values, types.Mean(&inc.Cache)) + } + return + + } + length := len(inc.Values) + newVal := (inc.Values[length-1]*float64(inc.Window-1) + value) / float64(inc.Window) + inc.Values = 
append(inc.Values, newVal) +} + func (inc *SMA) calculateAndUpdate(kLines []types.KLine) { if len(kLines) < inc.Window { return @@ -50,7 +80,12 @@ func (inc *SMA) calculateAndUpdate(kLines []types.KLine) { return } inc.Values.Push(sma) - inc.EndTime = kLines[index].EndTime + + if len(inc.Values) > MaxNumOfSMA { + inc.Values = inc.Values[MaxNumOfSMATruncateSize-1:] + } + + inc.EndTime = kLines[index].EndTime.Time() inc.EmitUpdate(sma) } diff --git a/pkg/indicator/ssf.go b/pkg/indicator/ssf.go new file mode 100644 index 0000000000..d8c1340d46 --- /dev/null +++ b/pkg/indicator/ssf.go @@ -0,0 +1,114 @@ +package indicator + +import ( + "math" + + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: https://easylanguagemastery.com/indicators/predictive-indicators/ +// Refer: https://github.com/twopirllc/pandas-ta/blob/main/pandas_ta/overlap/ssf.py +// Ehler's Super Smoother Filter +// +// John F. Ehlers's solution to reduce lag and remove aliasing noise with his +// research in aerospace analog filter design. This indicator comes with two +// versions determined by the keyword poles. By default, it uses two poles but +// there is an option for three poles. Since SSF is a (Resursive) Digital Filter, +// the number of poles determine how many prior recursive SSF bars to include in +// the design of the filter. So two poles uses two prior SSF bars and three poles +// uses three prior SSF bars for their filter calculations. +// +//go:generate callbackgen -type SSF +type SSF struct { + types.IntervalWindow + Poles int + c1 float64 + c2 float64 + c3 float64 + c4 float64 + Values types.Float64Slice + + UpdateCallbacks []func(value float64) +} + +func (inc *SSF) Update(value float64) { + if inc.Poles == 3 { + if inc.Values == nil { + x := math.Pi / float64(inc.Window) + a0 := math.Exp(-x) + b0 := 2. * a0 * math.Cos(math.Sqrt(3.)*x) + c0 := a0 * a0 + + inc.c4 = c0 * c0 + inc.c3 = -c0 * (1. + b0) + inc.c2 = c0 + b0 + inc.c1 = 1. - inc.c2 - inc.c3 - inc.c4 + inc.Values = types.Float64Slice{} + } + + result := inc.c1*value + + inc.c2*inc.Values.Index(0) + + inc.c3*inc.Values.Index(1) + + inc.c4*inc.Values.Index(2) + inc.Values.Push(result) + } else { // poles == 2 + if inc.Values == nil { + x := math.Pi * math.Sqrt(2.) / float64(inc.Window) + a0 := math.Exp(-x) + inc.c3 = -a0 * a0 + inc.c2 = 2. * a0 * math.Cos(x) + inc.c1 = 1. 
- inc.c2 - inc.c3 + inc.Values = types.Float64Slice{} + } + result := inc.c1*value + + inc.c2*inc.Values.Index(0) + + inc.c3*inc.Values.Index(1) + inc.Values.Push(result) + } +} + +func (inc *SSF) Index(i int) float64 { + if inc.Values == nil { + return 0.0 + } + return inc.Values.Index(i) +} + +func (inc *SSF) Length() int { + if inc.Values == nil { + return 0 + } + return inc.Values.Length() +} + +func (inc *SSF) Last() float64 { + if inc.Values == nil { + return 0.0 + } + return inc.Values.Last() +} + +var _ types.Series = &SSF{} + +func (inc *SSF) calculateAndUpdate(allKLines []types.KLine) { + if inc.Values != nil { + inc.Update(allKLines[len(allKLines)-1].Close.Float64()) + inc.EmitUpdate(inc.Last()) + return + } + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *SSF) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + inc.calculateAndUpdate(window) +} + +func (inc *SSF) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/ssf_callbacks.go b/pkg/indicator/ssf_callbacks.go new file mode 100644 index 0000000000..cdd2e8acaa --- /dev/null +++ b/pkg/indicator/ssf_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type SSF"; DO NOT EDIT. + +package indicator + +import () + +func (inc *SSF) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *SSF) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/ssf_test.go b/pkg/indicator/ssf_test.go new file mode 100644 index 0000000000..0eced9ca98 --- /dev/null +++ b/pkg/indicator/ssf_test.go @@ -0,0 +1,71 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +import pandas_ta as ta + +data = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +size = 5 + +result = ta.ssf(data, size, 2) +print(result) + +result = ta.ssf(data, size, 3) +print(result) +*/ +func Test_SSF(t *testing.T) { + var Delta = 0.00001 + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + poles int + want float64 + next float64 + all int + }{ + { + name: "pole2", + kLines: buildKLines(input), + poles: 2, + want: 8.721776, + next: 7.723223, + all: 30, + }, + { + name: "pole3", + kLines: buildKLines(input), + poles: 3, + want: 8.687588, + next: 7.668013, + all: 30, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ssf := SSF{ + IntervalWindow: types.IntervalWindow{Window: 5}, + Poles: tt.poles, + } + ssf.calculateAndUpdate(tt.kLines) + assert.InDelta(t, tt.want, ssf.Last(), Delta) + assert.InDelta(t, tt.next, ssf.Index(1), Delta) + assert.Equal(t, tt.all, ssf.Length()) + }) + } +} diff --git a/pkg/indicator/stoch.go b/pkg/indicator/stoch.go new file mode 100644 index 0000000000..c24cd81697 --- /dev/null +++ b/pkg/indicator/stoch.go @@ -0,0 +1,96 @@ +package indicator + +import ( + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +const DPeriod int = 3 + +/* +stoch implements stochastic oscillator 
indicator + +Stochastic Oscillator +- https://www.investopedia.com/terms/s/stochasticoscillator.asp +*/ +//go:generate callbackgen -type STOCH +type STOCH struct { + types.IntervalWindow + K types.Float64Slice + D types.Float64Slice + + HighValues types.Float64Slice + LowValues types.Float64Slice + + EndTime time.Time + UpdateCallbacks []func(k float64, d float64) +} + +func (inc *STOCH) Update(high, low, cloze float64) { + inc.HighValues.Push(high) + inc.LowValues.Push(low) + + lowest := inc.LowValues.Tail(inc.Window).Min() + highest := inc.HighValues.Tail(inc.Window).Max() + + if highest == lowest { + inc.K.Push(50.0) + } else { + k := 100.0 * (cloze - lowest) / (highest - lowest) + inc.K.Push(k) + } + + d := inc.K.Tail(DPeriod).Mean() + inc.D.Push(d) +} + +func (inc *STOCH) LastK() float64 { + if len(inc.K) == 0 { + return 0.0 + } + return inc.K[len(inc.K)-1] +} + +func (inc *STOCH) LastD() float64 { + if len(inc.K) == 0 { + return 0.0 + } + return inc.D[len(inc.D)-1] +} + +func (inc *STOCH) calculateAndUpdate(kLines []types.KLine) { + if len(kLines) < inc.Window || len(kLines) < DPeriod { + return + } + + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(k.High.Float64(), k.Low.Float64(), k.Close.Float64()) + } + + inc.EmitUpdate(inc.LastK(), inc.LastD()) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} + +func (inc *STOCH) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *STOCH) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func (inc *STOCH) GetD() types.Series { + return &inc.D +} + +func (inc *STOCH) GetK() types.Series { + return &inc.K +} diff --git a/pkg/indicator/stoch_callbacks.go b/pkg/indicator/stoch_callbacks.go new file mode 100644 index 0000000000..dcc07e76a5 --- /dev/null +++ b/pkg/indicator/stoch_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type STOCH"; DO NOT EDIT. + +package indicator + +import () + +func (inc *STOCH) OnUpdate(cb func(k float64, d float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *STOCH) EmitUpdate(k float64, d float64) { + for _, cb := range inc.UpdateCallbacks { + cb(k, d) + } +} diff --git a/pkg/indicator/stoch_test.go b/pkg/indicator/stoch_test.go new file mode 100644 index 0000000000..f8a90bfee8 --- /dev/null +++ b/pkg/indicator/stoch_test.go @@ -0,0 +1,74 @@ +package indicator + +import ( + "encoding/json" + "math" + "testing" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +/* +python +import pandas as pd +import pandas_ta as ta + +klines = ... 
+df = pd.DataFrame(klines, columns=['open', 'high', 'low', 'close', 'volume']) + print(df.ta.stoch(df['high'], df['low'], df['close'], k=14, d=3, smooth_k=1)) +*/ +func TestSTOCH_update(t *testing.T) { + open := []byte(`[8273.0, 8280.0, 8280.0, 8275.0, 8281.0, 8277.0, 8279.0, 8280.0, 8284.0, 8286.0, 8283.0, 8283.0, 8284.0, 8286.0, 8285.0, 8287.0, 8289.0, 8282.0, 8286.0, 8279.0, 8275.0, 8276.0, 8276.0, 8281.0, 8269.0, 8256.0, 8258.0, 8252.0, 8241.0, 8232.0, 8218.0, 8221.0, 8216.0, 8210.0, 8212.0, 8201.0, 8197.0, 8200.0, 8193.0, 8181.0, 8185.0, 8190.0, 8184.0, 8185.0, 8163.0, 8153.0, 8162.0, 8165.0, 8162.0, 8157.0, 8159.0, 8141.0, 8140.0, 8141.0, 8130.0, 8144.0, 8141.0, 8148.0, 8145.0, 8134.0, 8123.0, 8127.0, 8130.0, 8125.0, 8122.0, 8105.0, 8096.0, 8103.0, 8102.0, 8110.0, 8104.0, 8109.0, 8103.0, 8111.0, 8112.0, 8109.0, 8092.0, 8100.0, 8101.0, 8100.0, 8096.0, 8095.0, 8094.0, 8101.0, 8095.0, 8069.0, 8067.0, 8070.0, 8069.0, 8066.0, 8047.0, 8046.0, 8042.0, 8039.0, 8049.0, 8055.0, 8063.0, 8061.0, 8056.0, 8057.0, 8056.0, 8057.0, 8057.0, 8054.0, 8056.0, 8056.0, 8065.0, 8065.0, 8070.0, 8065.0, 8064.0, 8063.0, 8060.0, 8065.0, 8068.0, 8068.0, 8069.0, 8073.0, 8073.0, 8084.0, 8084.0, 8076.0, 8074.0, 8074.0, 8074.0, 8078.0, 8080.0, 8082.0, 8085.0, 8083.0, 8087.0, 8087.0, 8083.0, 8083.0, 8082.0, 8074.0, 8074.0, 8071.0, 8071.0, 8072.0, 8075.0, 8075.0, 8076.0, 8073.0, 8071.0, 8070.0, 8075.0, 8078.0, 8077.0, 8075.0, 8073.0, 8079.0, 8084.0, 8082.0, 8085.0, 8085.0, 8085.0, 8101.0, 8106.0, 8113.0, 8109.0, 8104.0, 8105.0, 8105.0, 8107.0, 8106.0, 8104.0, 8106.0, 8106.0, 8110.0, 8107.0, 8110.0, 8111.0, 8104.0, 8098.0, 8098.0, 8098.0, 8098.0, 8094.0, 8097.0, 8096.0, 8099.0, 8098.0, 8099.0, 8098.0, 8095.0, 8096.0, 8086.0, 8088.0, 8093.0, 8092.0, 8096.0, 8100.0, 8104.0, 8104.0, 8108.0, 8107.0, 8103.0, 8104.0, 8110.0, 8105.0, 8102.0, 8104.0, 8096.0, 8099.0, 8103.0, 8102.0, 8108.0, 8107.0, 8107.0, 8104.0, 8095.0, 8091.0, 8092.0, 8090.0, 8093.0, 8093.0, 8094.0, 8095.0, 8096.0, 8088.0, 8090.0, 8079.0, 8077.0, 8079.0, 8081.0, 8083.0, 8084.0, 8084.0, 8087.0, 8091.0, 8089.0, 8089.0, 8091.0, 8087.0, 8093.0, 8090.0, 8090.0, 8095.0, 8093.0, 8088.0, 8087.0, 8090.0, 8089.0, 8087.0, 8084.0, 8087.0, 8084.0, 8080.0, 8078.0, 8077.0, 8077.0, 8076.0, 8072.0, 8072.0, 8075.0, 8076.0, 8074.0, 8077.0, 8081.0, 8080.0, 8076.0, 8075.0, 8077.0, 8080.0, 8077.0, 8076.0, 8076.0, 8070.0, 8071.0, 8070.0, 8073.0, 8069.0, 8069.0, 8068.0, 8072.0, 8078.0, 8077.0, 8079.0, 8081.0, 8076.0, 8076.0, 8077.0, 8077.0, 8078.0, 8075.0, 8066.0, 8064.0, 8064.0, 8062.0, 8062.0, 8065.0, 8062.0, 8063.0, 8074.0, 8070.0, 8069.0, 8068.0, 8074.0, 8075.0]`) + high := []byte(`[8279.0, 8282.0, 8280.0, 8280.0, 8284.0, 8284.0, 8280.0, 8282.0, 8284.0, 8289.0, 8288.0, 8285.0, 8284.0, 8287.0, 8286.0, 8294.0, 8290.0, 8292.0, 8289.0, 8288.0, 8278.0, 8279.0, 8279.0, 8284.0, 8282.0, 8270.0, 8261.0, 8260.0, 8252.0, 8244.0, 8233.0, 8227.0, 8222.0, 8217.0, 8217.0, 8211.0, 8202.0, 8203.0, 8203.0, 8196.0, 8186.0, 8193.0, 8194.0, 8187.0, 8185.0, 8168.0, 8165.0, 8169.0, 8166.0, 8163.0, 8162.0, 8159.0, 8143.0, 8148.0, 8143.0, 8146.0, 8152.0, 8149.0, 8152.0, 8147.0, 8138.0, 8128.0, 8134.0, 8131.0, 8133.0, 8123.0, 8106.0, 8105.0, 8104.0, 8113.0, 8112.0, 8112.0, 8111.0, 8114.0, 8115.0, 8114.0, 8110.0, 8101.0, 8107.0, 8103.0, 8100.0, 8101.0, 8100.0, 8102.0, 8101.0, 8100.0, 8070.0, 8076.0, 8072.0, 8072.0, 8069.0, 8050.0, 8048.0, 8044.0, 8049.0, 8055.0, 8063.0, 8070.0, 8067.0, 8061.0, 8059.0, 8060.0, 8063.0, 8058.0, 8061.0, 8061.0, 8068.0, 8066.0, 8071.0, 8073.0, 8068.0, 8066.0, 8066.0, 8065.0, 
8070.0, 8072.0, 8072.0, 8075.0, 8078.0, 8084.0, 8085.0, 8084.0, 8077.0, 8076.0, 8075.0, 8079.0, 8081.0, 8083.0, 8088.0, 8086.0, 8088.0, 8088.0, 8092.0, 8086.0, 8086.0, 8083.0, 8075.0, 8074.0, 8073.0, 8073.0, 8077.0, 8077.0, 8078.0, 8077.0, 8076.0, 8073.0, 8075.0, 8079.0, 8079.0, 8078.0, 8074.0, 8080.0, 8086.0, 8086.0, 8085.0, 8085.0, 8087.0, 8102.0, 8109.0, 8113.0, 8114.0, 8110.0, 8105.0, 8106.0, 8109.0, 8114.0, 8107.0, 8106.0, 8106.0, 8110.0, 8111.0, 8110.0, 8112.0, 8112.0, 8109.0, 8102.0, 8098.0, 8099.0, 8098.0, 8097.0, 8099.0, 8099.0, 8099.0, 8102.0, 8099.0, 8099.0, 8096.0, 8097.0, 8091.0, 8094.0, 8094.0, 8096.0, 8102.0, 8106.0, 8109.0, 8109.0, 8110.0, 8108.0, 8106.0, 8110.0, 8122.0, 8105.0, 8105.0, 8104.0, 8103.0, 8104.0, 8103.0, 8110.0, 8110.0, 8107.0, 8109.0, 8105.0, 8097.0, 8095.0, 8093.0, 8094.0, 8097.0, 8096.0, 8096.0, 8096.0, 8097.0, 8092.0, 8090.0, 8081.0, 8081.0, 8083.0, 8087.0, 8085.0, 8085.0, 8087.0, 8092.0, 8094.0, 8090.0, 8093.0, 8092.0, 8094.0, 8093.0, 8091.0, 8095.0, 8095.0, 8092.0, 8089.0, 8090.0, 8090.0, 8091.0, 8088.0, 8089.0, 8089.0, 8085.0, 8081.0, 8080.0, 8078.0, 8078.0, 8076.0, 8073.0, 8077.0, 8078.0, 8077.0, 8077.0, 8083.0, 8082.0, 8082.0, 8077.0, 8079.0, 8082.0, 8080.0, 8077.0, 8078.0, 8076.0, 8073.0, 8074.0, 8073.0, 8073.0, 8070.0, 8070.0, 8072.0, 8079.0, 8078.0, 8079.0, 8081.0, 8083.0, 8077.0, 8078.0, 8080.0, 8079.0, 8080.0, 8077.0, 8069.0, 8071.0, 8066.0, 8064.0, 8066.0, 8066.0, 8063.0, 8074.0, 8075.0, 8071.0, 8070.0, 8075.0, 8075.0]`) + low := []byte(`[8260.0, 8272.0, 8275.0, 8274.0, 8275.0, 8277.0, 8276.0, 8278.0, 8277.0, 8283.0, 8282.0, 8283.0, 8283.0, 8283.0, 8283.0, 8279.0, 8281.0, 8282.0, 8277.0, 8276.0, 8273.0, 8275.0, 8274.0, 8275.0, 8266.0, 8256.0, 8255.0, 8250.0, 8239.0, 8230.0, 8214.0, 8218.0, 8216.0, 8208.0, 8209.0, 8201.0, 8190.0, 8195.0, 8193.0, 8181.0, 8175.0, 8183.0, 8182.0, 8181.0, 8159.0, 8152.0, 8150.0, 8160.0, 8161.0, 8153.0, 8153.0, 8137.0, 8135.0, 8139.0, 8130.0, 8130.0, 8140.0, 8137.0, 8145.0, 8134.0, 8123.0, 8116.0, 8122.0, 8124.0, 8122.0, 8105.0, 8096.0, 8096.0, 8097.0, 8100.0, 8100.0, 8104.0, 8101.0, 8103.0, 8109.0, 8108.0, 8089.0, 8092.0, 8097.0, 8098.0, 8094.0, 8092.0, 8087.0, 8094.0, 8094.0, 8069.0, 8058.0, 8065.0, 8066.0, 8065.0, 8046.0, 8041.0, 8036.0, 8038.0, 8039.0, 8047.0, 8053.0, 8058.0, 8056.0, 8056.0, 8053.0, 8052.0, 8054.0, 8051.0, 8053.0, 8056.0, 8055.0, 8063.0, 8064.0, 8063.0, 8062.0, 8061.0, 8059.0, 8059.0, 8063.0, 8066.0, 8067.0, 8068.0, 8071.0, 8071.0, 8079.0, 8074.0, 8073.0, 8074.0, 8073.0, 8073.0, 8076.0, 8079.0, 8080.0, 8083.0, 8083.0, 8085.0, 8082.0, 8082.0, 8081.0, 8072.0, 8072.0, 8068.0, 8070.0, 8070.0, 8072.0, 8074.0, 8075.0, 8073.0, 8071.0, 8070.0, 8067.0, 8074.0, 8076.0, 8072.0, 8070.0, 8072.0, 8079.0, 8081.0, 8082.0, 8082.0, 8084.0, 8083.0, 8097.0, 8103.0, 8107.0, 8104.0, 8103.0, 8104.0, 8103.0, 8105.0, 8103.0, 8102.0, 8102.0, 8103.0, 8106.0, 8107.0, 8108.0, 8102.0, 8098.0, 8096.0, 8095.0, 8096.0, 8093.0, 8094.0, 8094.0, 8096.0, 8097.0, 8097.0, 8096.0, 8094.0, 8094.0, 8086.0, 8086.0, 8087.0, 8090.0, 8091.0, 8095.0, 8099.0, 8104.0, 8102.0, 8106.0, 8101.0, 8103.0, 8104.0, 8104.0, 8101.0, 8102.0, 8096.0, 8096.0, 8098.0, 8100.0, 8102.0, 8106.0, 8103.0, 8103.0, 8094.0, 8090.0, 8090.0, 8089.0, 8088.0, 8090.0, 8093.0, 8094.0, 8094.0, 8088.0, 8087.0, 8079.0, 8075.0, 8076.0, 8077.0, 8081.0, 8083.0, 8083.0, 8084.0, 8087.0, 8089.0, 8088.0, 8088.0, 8086.0, 8087.0, 8090.0, 8088.0, 8090.0, 8091.0, 8087.0, 8087.0, 8086.0, 8088.0, 8087.0, 8082.0, 8083.0, 8083.0, 8078.0, 8077.0, 8077.0, 8072.0, 8074.0, 8071.0, 8070.0, 
8072.0, 8073.0, 8073.0, 8072.0, 8076.0, 8079.0, 8075.0, 8075.0, 8075.0, 8076.0, 8076.0, 8074.0, 8076.0, 8069.0, 8068.0, 8069.0, 8069.0, 8065.0, 8067.0, 8067.0, 8067.0, 8073.0, 8075.0, 8076.0, 8077.0, 8075.0, 8072.0, 8074.0, 8075.0, 8074.0, 8072.0, 8066.0, 8063.0, 8062.0, 8058.0, 8060.0, 8059.0, 8060.0, 8059.0, 8062.0, 8067.0, 8068.0, 8067.0, 8068.0, 8071.0]`) + close := []byte(`[8262.0, 8273.0, 8279.0, 8279.0, 8275.0, 8282.0, 8278.0, 8279.0, 8281.0, 8285.0, 8287.0, 8284.0, 8283.0, 8283.0, 8285.0, 8286.0, 8287.0, 8290.0, 8283.0, 8287.0, 8278.0, 8275.0, 8276.0, 8275.0, 8281.0, 8270.0, 8257.0, 8258.0, 8252.0, 8243.0, 8231.0, 8219.0, 8220.0, 8216.0, 8210.0, 8211.0, 8201.0, 8197.0, 8201.0, 8193.0, 8183.0, 8184.0, 8191.0, 8184.0, 8185.0, 8161.0, 8154.0, 8163.0, 8164.0, 8162.0, 8156.0, 8158.0, 8141.0, 8139.0, 8142.0, 8130.0, 8145.0, 8140.0, 8149.0, 8146.0, 8136.0, 8123.0, 8126.0, 8130.0, 8125.0, 8122.0, 8106.0, 8096.0, 8103.0, 8102.0, 8111.0, 8105.0, 8111.0, 8103.0, 8112.0, 8113.0, 8109.0, 8093.0, 8101.0, 8101.0, 8100.0, 8095.0, 8096.0, 8095.0, 8100.0, 8095.0, 8069.0, 8068.0, 8072.0, 8068.0, 8067.0, 8046.0, 8045.0, 8043.0, 8040.0, 8049.0, 8055.0, 8062.0, 8062.0, 8058.0, 8056.0, 8055.0, 8058.0, 8057.0, 8054.0, 8056.0, 8057.0, 8066.0, 8065.0, 8069.0, 8064.0, 8063.0, 8064.0, 8059.0, 8065.0, 8069.0, 8068.0, 8069.0, 8072.0, 8074.0, 8084.0, 8084.0, 8076.0, 8074.0, 8074.0, 8075.0, 8077.0, 8080.0, 8082.0, 8086.0, 8084.0, 8087.0, 8087.0, 8083.0, 8083.0, 8082.0, 8074.0, 8073.0, 8072.0, 8071.0, 8072.0, 8075.0, 8076.0, 8076.0, 8074.0, 8071.0, 8071.0, 8075.0, 8079.0, 8077.0, 8074.0, 8072.0, 8079.0, 8084.0, 8082.0, 8085.0, 8086.0, 8084.0, 8102.0, 8107.0, 8113.0, 8109.0, 8104.0, 8104.0, 8105.0, 8108.0, 8106.0, 8104.0, 8106.0, 8105.0, 8110.0, 8107.0, 8109.0, 8112.0, 8104.0, 8099.0, 8097.0, 8097.0, 8098.0, 8095.0, 8096.0, 8097.0, 8099.0, 8098.0, 8099.0, 8099.0, 8095.0, 8097.0, 8086.0, 8088.0, 8093.0, 8092.0, 8096.0, 8101.0, 8105.0, 8105.0, 8109.0, 8107.0, 8103.0, 8104.0, 8109.0, 8105.0, 8102.0, 8104.0, 8097.0, 8100.0, 8103.0, 8103.0, 8109.0, 8107.0, 8106.0, 8104.0, 8096.0, 8090.0, 8092.0, 8089.0, 8093.0, 8093.0, 8094.0, 8095.0, 8096.0, 8088.0, 8089.0, 8079.0, 8077.0, 8079.0, 8082.0, 8083.0, 8084.0, 8084.0, 8087.0, 8091.0, 8088.0, 8088.0, 8091.0, 8087.0, 8092.0, 8090.0, 8091.0, 8095.0, 8092.0, 8088.0, 8087.0, 8090.0, 8089.0, 8087.0, 8084.0, 8088.0, 8084.0, 8079.0, 8078.0, 8078.0, 8076.0, 8075.0, 8071.0, 8072.0, 8074.0, 8077.0, 8074.0, 8077.0, 8081.0, 8080.0, 8076.0, 8076.0, 8078.0, 8079.0, 8076.0, 8076.0, 8076.0, 8070.0, 8072.0, 8069.0, 8072.0, 8070.0, 8069.0, 8069.0, 8073.0, 8078.0, 8077.0, 8079.0, 8080.0, 8076.0, 8076.0, 8076.0, 8077.0, 8078.0, 8075.0, 8067.0, 8064.0, 8064.0, 8062.0, 8062.0, 8065.0, 8062.0, 8063.0, 8074.0, 8070.0, 8069.0, 8068.0, 8074.0]`) + + buildKLines := func(open, high, low, close []fixedpoint.Value) (kLines []types.KLine) { + for i := range high { + kLines = append(kLines, types.KLine{Open: open[i], High: high[i], Low: low[i], Close: close[i], EndTime: types.Time(time.Now())}) + } + return kLines + } + var o, h, l, c []fixedpoint.Value + _ = json.Unmarshal(open, &o) + _ = json.Unmarshal(high, &h) + _ = json.Unmarshal(low, &l) + _ = json.Unmarshal(close, &c) + + tests := []struct { + name string + kLines []types.KLine + window int + want_k float64 + want_d float64 + }{ + { + name: "TXF1-1min_2016/1/4", + kLines: buildKLines(o, h, l, c), + window: 14, + want_k: 84.210526, + want_d: 59.888357, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + kd := 
STOCH{IntervalWindow: types.IntervalWindow{Window: tt.window}} + kd.calculateAndUpdate(tt.kLines) + + got_k := kd.LastK() + diff_k := math.Trunc((got_k-tt.want_k)*100) / 100 + if diff_k != 0 { + t.Errorf("%%K() = %v, want %v", got_k, tt.want_k) + } + + got_d := kd.LastD() + diff_d := math.Trunc((got_d-tt.want_d)*100) / 100 + if diff_d != 0 { + t.Errorf("%%D() = %v, want %v", got_d, tt.want_d) + } + }) + } +} diff --git a/pkg/indicator/supertrend.go b/pkg/indicator/supertrend.go new file mode 100644 index 0000000000..c195dc4e65 --- /dev/null +++ b/pkg/indicator/supertrend.go @@ -0,0 +1,147 @@ +package indicator + +import ( + "github.com/sirupsen/logrus" + "math" + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +var logst = logrus.WithField("indicator", "supertrend") + +//go:generate callbackgen -type Supertrend +type Supertrend struct { + types.IntervalWindow + ATRMultiplier float64 `json:"atrMultiplier"` + + AverageTrueRange *ATR + + trendPrices types.Float64Slice + + closePrice float64 + previousClosePrice float64 + uptrendPrice float64 + previousUptrendPrice float64 + downtrendPrice float64 + previousDowntrendPrice float64 + + trend types.Direction + previousTrend types.Direction + tradeSignal types.Direction + + EndTime time.Time + UpdateCallbacks []func(value float64) +} + +func (inc *Supertrend) Last() float64 { + return inc.trendPrices.Last() +} + +func (inc *Supertrend) Index(i int) float64 { + length := inc.Length() + if length == 0 || length-i-1 < 0 { + return 0 + } + return inc.trendPrices[length-i-1] +} + +func (inc *Supertrend) Length() int { + return len(inc.trendPrices) +} +func (inc *Supertrend) Update(highPrice, lowPrice, closePrice float64) { + if inc.Window <= 0 { + panic("window must be greater than 0") + } + + // Start with DirectionUp + if inc.trend != types.DirectionUp && inc.trend != types.DirectionDown { + inc.trend = types.DirectionUp + } + + // Update ATR + inc.AverageTrueRange.Update(highPrice, lowPrice, closePrice) + + // Update last prices + inc.previousUptrendPrice = inc.uptrendPrice + inc.previousDowntrendPrice = inc.downtrendPrice + inc.previousClosePrice = inc.closePrice + inc.previousTrend = inc.trend + + inc.closePrice = closePrice + + src := (highPrice + lowPrice) / 2 + + // Update uptrend + inc.uptrendPrice = src - inc.AverageTrueRange.Last()*inc.ATRMultiplier + if inc.previousClosePrice > inc.previousUptrendPrice { + inc.uptrendPrice = math.Max(inc.uptrendPrice, inc.previousUptrendPrice) + } + + // Update downtrend + inc.downtrendPrice = src + inc.AverageTrueRange.Last()*inc.ATRMultiplier + if inc.previousClosePrice < inc.previousDowntrendPrice { + inc.downtrendPrice = math.Min(inc.downtrendPrice, inc.previousDowntrendPrice) + } + + // Update trend + if inc.previousTrend == types.DirectionUp && inc.closePrice < inc.previousUptrendPrice { + inc.trend = types.DirectionDown + } else if inc.previousTrend == types.DirectionDown && inc.closePrice > inc.previousDowntrendPrice { + inc.trend = types.DirectionUp + } else { + inc.trend = inc.previousTrend + } + + // Update signal + if inc.AverageTrueRange.Last() <= 0 { + inc.tradeSignal = types.DirectionNone + } else if inc.trend == types.DirectionUp && inc.previousTrend == types.DirectionDown { + inc.tradeSignal = types.DirectionUp + } else if inc.trend == types.DirectionDown && inc.previousTrend == types.DirectionUp { + inc.tradeSignal = types.DirectionDown + } else { + inc.tradeSignal = types.DirectionNone + } + + // Update trend price + if inc.trend == types.DirectionDown { + 
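// In a downtrend the supertrend line follows the downtrend (upper band) price; otherwise it follows the uptrend (lower band) price.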
inc.trendPrices.Push(inc.downtrendPrice) + } else { + inc.trendPrices.Push(inc.uptrendPrice) + } + + logst.Debugf("Update supertrend result: closePrice: %v, uptrendPrice: %v, downtrendPrice: %v, trend: %v,"+ + " tradeSignal: %v, AverageTrueRange.Last(): %v", inc.closePrice, inc.uptrendPrice, inc.downtrendPrice, + inc.trend, inc.tradeSignal, inc.AverageTrueRange.Last()) +} + +func (inc *Supertrend) GetSignal() types.Direction { + return inc.tradeSignal +} + +var _ types.Series = &Supertrend{} + +func (inc *Supertrend) calculateAndUpdate(kLines []types.KLine) { + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(k.GetHigh().Float64(), k.GetLow().Float64(), k.GetClose().Float64()) + } + + inc.EmitUpdate(inc.Last()) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} + +func (inc *Supertrend) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *Supertrend) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/supertrend_callbacks.go b/pkg/indicator/supertrend_callbacks.go new file mode 100644 index 0000000000..d02345798e --- /dev/null +++ b/pkg/indicator/supertrend_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type Supertrend"; DO NOT EDIT. + +package indicator + +import () + +func (inc *Supertrend) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *Supertrend) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/tema.go b/pkg/indicator/tema.go new file mode 100644 index 0000000000..91d53a63d0 --- /dev/null +++ b/pkg/indicator/tema.go @@ -0,0 +1,78 @@ +package indicator + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Triple Exponential Moving Average (TEMA) +// URL: https://investopedia.com/terms/t/triple-exponential-moving-average.asp + +//go:generate callbackgen -type TEMA +type TEMA struct { + types.IntervalWindow + Values types.Float64Slice + A1 *EWMA + A2 *EWMA + A3 *EWMA + + UpdateCallbacks []func(value float64) +} + +func (inc *TEMA) Update(value float64) { + if len(inc.Values) == 0 { + inc.A1 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.A2 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.A3 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + } + inc.A1.Update(value) + a1 := inc.A1.Last() + inc.A2.Update(a1) + a2 := inc.A2.Last() + inc.A3.Update(a2) + a3 := inc.A3.Last() + inc.Values.Push(3*a1 - 3*a2 + a3) +} + +func (inc *TEMA) Last() float64 { + if len(inc.Values) > 0 { + return inc.Values[len(inc.Values)-1] + } + return 0.0 +} + +func (inc *TEMA) Index(i int) float64 { + if i >= len(inc.Values) { + return 0 + } + return inc.Values[len(inc.Values)-i-1] +} + +func (inc *TEMA) Length() int { + return len(inc.Values) +} + +var _ types.Series = &TEMA{} + +func (inc *TEMA) calculateAndUpdate(allKLines []types.KLine) { + if inc.A1 == nil { + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + } else { + inc.Update(allKLines[len(allKLines)-1].Close.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *TEMA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + 
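// Recompute TEMA for the updated window: TEMA = 3*EMA1 - 3*EMA2 + EMA3, where EMA2 = EMA(EMA1) and EMA3 = EMA(EMA2), as in Update above.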
inc.calculateAndUpdate(window) +} + +func (inc *TEMA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/tema_callbacks.go b/pkg/indicator/tema_callbacks.go new file mode 100644 index 0000000000..ed63757bad --- /dev/null +++ b/pkg/indicator/tema_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type TEMA"; DO NOT EDIT. + +package indicator + +import () + +func (inc *TEMA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *TEMA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/tema_test.go b/pkg/indicator/tema_test.go new file mode 100644 index 0000000000..641153f402 --- /dev/null +++ b/pkg/indicator/tema_test.go @@ -0,0 +1,56 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +s = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +ma1 = s.ewm(span=16).mean() +ma2 = ma1.ewm(span=16).mean() +ma3 = ma2.ewm(span=16).mean() +result = (3 * ma1 - 3 * ma2 + ma3) +print(result) +*/ +func Test_TEMA(t *testing.T) { + var Delta = 4.3e-2 + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + want float64 + next float64 + all int + }{ + { + name: "random_case", + kLines: buildKLines(input), + want: 7.163145, + next: 6.106229, + all: 50, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tema := TEMA{IntervalWindow: types.IntervalWindow{Window: 16}} + tema.calculateAndUpdate(tt.kLines) + last := tema.Last() + assert.InDelta(t, tt.want, last, Delta) + assert.InDelta(t, tt.next, tema.Index(1), Delta) + assert.Equal(t, tt.all, tema.Length()) + }) + } +} diff --git a/pkg/indicator/till.go b/pkg/indicator/till.go new file mode 100644 index 0000000000..73f97ead50 --- /dev/null +++ b/pkg/indicator/till.go @@ -0,0 +1,112 @@ +package indicator + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +const defaultVolumeFactor = 0.7 + +// Refer: Tillson T3 Moving Average +// Refer URL: https://tradingpedia.com/forex-trading-indicator/t3-moving-average-indicator/ +//go:generate callbackgen -type TILL +type TILL struct { + types.IntervalWindow + VolumeFactor float64 + e1 *EWMA + e2 *EWMA + e3 *EWMA + e4 *EWMA + e5 *EWMA + e6 *EWMA + c1 float64 + c2 float64 + c3 float64 + c4 float64 + UpdateCallbacks []func(value float64) +} + +func (inc *TILL) Update(value float64) { + if inc.e1 == nil || inc.e1.Length() == 0 { + if inc.VolumeFactor == 0 { + inc.VolumeFactor = defaultVolumeFactor + } + inc.e1 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.e2 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.e3 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.e4 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.e5 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.e6 = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + 
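// The T3 coefficients below follow Tillson's weighting with volume factor a: c1 = -a^3, c2 = 3a^2 + 3a^3, c3 = -6a^2 - 3a - 3a^3, c4 = 1 + 3a + a^3 + 3a^2.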
square := inc.VolumeFactor * inc.VolumeFactor + cube := inc.VolumeFactor * square + inc.c1 = -cube + inc.c2 = 3.*square + 3.*cube + inc.c3 = -6.*square - 3*inc.VolumeFactor - 3*cube + inc.c4 = 1. + 3.*inc.VolumeFactor + cube + 3.*square + } + + inc.e1.Update(value) + inc.e2.Update(inc.e1.Last()) + inc.e3.Update(inc.e2.Last()) + inc.e4.Update(inc.e3.Last()) + inc.e5.Update(inc.e4.Last()) + inc.e6.Update(inc.e5.Last()) +} + +func (inc *TILL) Last() float64 { + if inc.e1 == nil || inc.e1.Length() == 0 { + return 0 + } + e3 := inc.e3.Last() + e4 := inc.e4.Last() + e5 := inc.e5.Last() + e6 := inc.e6.Last() + return inc.c1*e6 + inc.c2*e5 + inc.c3*e4 + inc.c4*e3 +} + +func (inc *TILL) Index(i int) float64 { + if inc.e1 == nil || inc.e1.Length() <= i { + return 0 + } + e3 := inc.e3.Index(i) + e4 := inc.e4.Index(i) + e5 := inc.e5.Index(i) + e6 := inc.e6.Index(i) + return inc.c1*e6 + inc.c2*e5 + inc.c3*e4 + inc.c4*e3 +} + +func (inc *TILL) Length() int { + if inc.e1 == nil { + return 0 + } + return inc.e1.Length() +} + +var _ types.Series = &TILL{} + +func (inc *TILL) calculateAndUpdate(allKLines []types.KLine) { + doable := false + if inc.e1 == nil { + doable = true + } + for _, k := range allKLines { + if !doable && k.StartTime.After(inc.e1.LastOpenTime) { + doable = true + } + if doable { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + } +} + +func (inc *TILL) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *TILL) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/till_callbacks.go b/pkg/indicator/till_callbacks.go new file mode 100644 index 0000000000..53d89cb8d8 --- /dev/null +++ b/pkg/indicator/till_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type TILL"; DO NOT EDIT. 
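For reference, a minimal, hypothetical usage sketch of the TILL (Tillson T3) indicator defined in till.go above; the price series is invented, and only the Update and Last methods shown there are exercised.

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	// Same window as the test below; VolumeFactor falls back to 0.7 on the first Update.
	till := &indicator.TILL{IntervalWindow: types.IntervalWindow{Window: 16}}
	for _, price := range []float64{100, 101, 102, 101, 103, 104, 105} { // made-up closes
		till.Update(price)
	}
	fmt.Printf("T3 = %f\n", till.Last()) // c1*e6 + c2*e5 + c3*e4 + c4*e3
}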
+ +package indicator + +import () + +func (inc *TILL) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *TILL) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/till_test.go b/pkg/indicator/till_test.go new file mode 100644 index 0000000000..4615a5dbe0 --- /dev/null +++ b/pkg/indicator/till_test.go @@ -0,0 +1,65 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +s = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +ma1 = s.ewm(span=16).mean() +ma2 = ma1.ewm(span=16).mean() +ma3 = ma2.ewm(span=16).mean() +ma4 = ma3.ewm(span=16).mean() +ma5 = ma4.ewm(span=16).mean() +ma6 = ma5.ewm(span=16).mean() +square = 0.7 * 0.7 +cube = 0.7 ** 3 +c1 = -cube +c2 = 3 * square + 3 * cube +c3 = -6 * square - 3 * 0.7 - 3 * cube +c4 = 1 + 3 * 0.7 + cube + 3 * square +result = (c1 * ma6 + c2 * ma5 + c3 * ma4 + c4 * ma3) +print(result) +*/ +func Test_TILL(t *testing.T) { + var Delta = 0.18 + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + want float64 + next float64 + all int + }{ + { + name: "random_case", + kLines: buildKLines(input), + want: 4.528608, + next: 4.457134, + all: 50, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + till := TILL{IntervalWindow: types.IntervalWindow{Window: 16}} + till.calculateAndUpdate(tt.kLines) + last := till.Last() + assert.InDelta(t, tt.want, last, Delta) + assert.InDelta(t, tt.next, till.Index(1), Delta) + assert.Equal(t, tt.all, till.Length()) + }) + } +} diff --git a/pkg/indicator/tma.go b/pkg/indicator/tma.go new file mode 100644 index 0000000000..482f3936cd --- /dev/null +++ b/pkg/indicator/tma.go @@ -0,0 +1,73 @@ +package indicator + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Triangular Moving Average +// Refer URL: https://ja.wikipedia.org/wiki/移動平均 +//go:generate callbackgen -type TMA +type TMA struct { + types.IntervalWindow + s1 *SMA + s2 *SMA + UpdateCallbacks []func(value float64) +} + +func (inc *TMA) Update(value float64) { + if inc.s1 == nil { + w := (inc.Window + 1) / 2 + inc.s1 = &SMA{IntervalWindow: types.IntervalWindow{inc.Interval, w}} + inc.s2 = &SMA{IntervalWindow: types.IntervalWindow{inc.Interval, w}} + } + + inc.s1.Update(value) + inc.s2.Update(inc.s1.Last()) +} + +func (inc *TMA) Last() float64 { + if inc.s2 == nil { + return 0 + } + return inc.s2.Last() +} + +func (inc *TMA) Index(i int) float64 { + if inc.s2 == nil { + return 0 + } + return inc.s2.Index(i) +} + +func (inc *TMA) Length() int { + if inc.s2 == nil { + return 0 + } + return inc.s2.Length() +} + +var _ types.Series = &TMA{} + +func (inc *TMA) calculateAndUpdate(allKLines []types.KLine) { + if inc.s1 == nil { + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + } else { + inc.Update(allKLines[len(allKLines)-1].Close.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *TMA) handleKLineWindowUpdate(interval types.Interval, window 
types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *TMA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/tma_callbacks.go b/pkg/indicator/tma_callbacks.go new file mode 100644 index 0000000000..7c468f55d9 --- /dev/null +++ b/pkg/indicator/tma_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type TMA"; DO NOT EDIT. + +package indicator + +import () + +func (inc *TMA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *TMA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/util.go b/pkg/indicator/util.go new file mode 100644 index 0000000000..05f4c6a690 --- /dev/null +++ b/pkg/indicator/util.go @@ -0,0 +1,29 @@ +package indicator + +import "github.com/c9s/bbgo/pkg/types" + +type KLinePriceMapper func(k types.KLine) float64 + +func KLineOpenPriceMapper(k types.KLine) float64 { + return k.Open.Float64() +} + +func KLineClosePriceMapper(k types.KLine) float64 { + return k.Close.Float64() +} + +func KLineTypicalPriceMapper(k types.KLine) float64 { + return (k.High.Float64() + k.Low.Float64() + k.Close.Float64()) / 3. +} + +func MapKLinePrice(kLines []types.KLine, f KLinePriceMapper) (prices []float64) { + for _, k := range kLines { + prices = append(prices, f(k)) + } + + return prices +} + +type KLineWindowUpdater interface { + OnKLineWindowUpdate(func(interval types.Interval, window types.KLineWindow)) +} diff --git a/pkg/indicator/vidya.go b/pkg/indicator/vidya.go new file mode 100644 index 0000000000..658e89ac11 --- /dev/null +++ b/pkg/indicator/vidya.go @@ -0,0 +1,93 @@ +package indicator + +import ( + "math" + + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Variable Index Dynamic Average +// Refer URL: https://metatrader5.com/en/terminal/help/indicators/trend_indicators/vida +//go:generate callbackgen -type VIDYA +type VIDYA struct { + types.IntervalWindow + Values types.Float64Slice + input types.Float64Slice + + UpdateCallbacks []func(value float64) +} + +func (inc *VIDYA) Update(value float64) { + if inc.Values.Length() == 0 { + inc.Values.Push(value) + inc.input.Push(value) + return + } + inc.input.Push(value) + if len(inc.input) > MaxNumOfEWMA { + inc.input = inc.input[MaxNumOfEWMATruncateSize-1:] + } + /*upsum := 0. + downsum := 0. + for i := 0; i < inc.Window; i++ { + if len(inc.input) <= i+1 { + break + } + diff := inc.input.Index(i) - inc.input.Index(i+1) + if diff > 0 { + upsum += diff + } else { + downsum += -diff + } + + } + if upsum == 0 && downsum == 0 { + return + } + CMO := math.Abs((upsum - downsum) / (upsum + downsum))*/ + change := types.Change(&inc.input) + CMO := math.Abs(types.Sum(change, inc.Window) / types.Sum(types.Abs(change), inc.Window)) + alpha := 2. 
/ float64(inc.Window+1) + inc.Values.Push(value*alpha*CMO + inc.Values.Last()*(1.-alpha*CMO)) + if inc.Values.Length() > MaxNumOfEWMA { + inc.Values = inc.Values[MaxNumOfEWMATruncateSize-1:] + } +} + +func (inc *VIDYA) Last() float64 { + return inc.Values.Last() +} + +func (inc *VIDYA) Index(i int) float64 { + return inc.Values.Index(i) +} + +func (inc *VIDYA) Length() int { + return inc.Values.Length() +} + +var _ types.Series = &VIDYA{} + +func (inc *VIDYA) calculateAndUpdate(allKLines []types.KLine) { + if inc.input.Length() == 0 { + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + } else { + inc.Update(allKLines[len(allKLines)-1].Close.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *VIDYA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *VIDYA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/vidya_callbacks.go b/pkg/indicator/vidya_callbacks.go new file mode 100644 index 0000000000..b78e797c45 --- /dev/null +++ b/pkg/indicator/vidya_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type VIDYA"; DO NOT EDIT. + +package indicator + +import () + +func (inc *VIDYA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *VIDYA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/vidya_test.go b/pkg/indicator/vidya_test.go new file mode 100644 index 0000000000..8ac1df255d --- /dev/null +++ b/pkg/indicator/vidya_test.go @@ -0,0 +1,19 @@ +package indicator + +import ( + "testing" + + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +func Test_VIDYA(t *testing.T) { + vidya := &VIDYA{IntervalWindow: types.IntervalWindow{Window: 16}} + vidya.Update(1) + assert.Equal(t, vidya.Last(), 1.) + vidya.Update(2) + newV := 2./17.*2. + 1.*(1.-2./17.) 
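// With a single positive change the CMO term is 1, so the expected second value reduces to alpha*2 + (1-alpha)*1 with alpha = 2/(16+1) = 2/17.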
+ assert.Equal(t, vidya.Last(), newV) + vidya.Update(1) + assert.Equal(t, vidya.Last(), vidya.Index(1)) +} diff --git a/pkg/indicator/volatility.go b/pkg/indicator/volatility.go new file mode 100644 index 0000000000..aae62e2836 --- /dev/null +++ b/pkg/indicator/volatility.go @@ -0,0 +1,97 @@ +package indicator + +import ( + "fmt" + "math" + "time" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/types" +) + +const MaxNumOfVOL = 5_000 +const MaxNumOfVOLTruncateSize = 100 + +//var zeroTime time.Time + +//go:generate callbackgen -type VOLATILITY +type VOLATILITY struct { + types.IntervalWindow + Values types.Float64Slice + EndTime time.Time + + UpdateCallbacks []func(value float64) +} + +func (inc *VOLATILITY) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *VOLATILITY) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + volatility, err := calculateVOLATILITY(recentT, inc.Window, KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate volatility") + return + } + inc.Values.Push(volatility) + + if len(inc.Values) > MaxNumOfVOL { + inc.Values = inc.Values[MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(volatility) +} + +func (inc *VOLATILITY) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *VOLATILITY) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateVOLATILITY(klines []types.KLine, window int, priceF KLinePriceMapper) (float64, error) { + length := len(klines) + if length == 0 || length < window { + return 0.0, fmt.Errorf("insufficient elements for calculating VOL with window = %d", window) + } + + sum := 0.0 + for _, k := range klines { + sum += priceF(k) + } + + avg := sum / float64(window) + sv := 0.0 // sum of variance + + for _, j := range klines { + // The use of Pow math function func Pow(x, y float64) float64 + sv += math.Pow(priceF(j)-avg, 2) + } + // The use of Sqrt math function func Sqrt(x float64) float64 + sd := math.Sqrt(sv / float64(len(klines))) + return sd, nil +} diff --git a/pkg/indicator/volatility_callbacks.go b/pkg/indicator/volatility_callbacks.go new file mode 100644 index 0000000000..9f5311d757 --- /dev/null +++ b/pkg/indicator/volatility_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type VOLATILITY"; DO NOT EDIT. 
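As a quick sanity check of the formula used by calculateVOLATILITY above (square root of the sum of squared deviations from the window mean, divided by the number of klines), here is a standalone re-derivation with made-up closes; it does not touch the unexported helper.

package main

import (
	"fmt"
	"math"
)

func main() {
	closes := []float64{1, 2, 3, 4, 5} // made-up closes, window = 5
	mean := 0.0
	for _, c := range closes {
		mean += c
	}
	mean /= float64(len(closes))

	sv := 0.0 // sum of squared deviations, as in calculateVOLATILITY
	for _, c := range closes {
		sv += math.Pow(c-mean, 2)
	}
	fmt.Println(math.Sqrt(sv / float64(len(closes)))) // ≈ 1.4142 for this series
}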
+ +package indicator + +import () + +func (inc *VOLATILITY) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *VOLATILITY) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/vwap.go b/pkg/indicator/vwap.go new file mode 100644 index 0000000000..7fcac717aa --- /dev/null +++ b/pkg/indicator/vwap.go @@ -0,0 +1,102 @@ +package indicator + +import ( + "time" + + "github.com/c9s/bbgo/pkg/types" +) + +/* +vwap implements the volume weighted average price (VWAP) indicator: + +Volume Weighted Average Price (VWAP) Definition +- https://www.investopedia.com/terms/v/vwap.asp + +Volume-Weighted Average Price (VWAP) Explained +- https://academy.binance.com/en/articles/volume-weighted-average-price-vwap-explained +*/ +//go:generate callbackgen -type VWAP +type VWAP struct { + types.IntervalWindow + Values types.Float64Slice + Prices types.Float64Slice + Volumes types.Float64Slice + WeightedSum float64 + VolumeSum float64 + + EndTime time.Time + UpdateCallbacks []func(value float64) +} + +func (inc *VWAP) Update(price, volume float64) { + inc.Prices.Push(price) + inc.Volumes.Push(volume) + + if inc.Window != 0 && len(inc.Prices) > inc.Window { + popIndex := len(inc.Prices) - inc.Window - 1 + inc.WeightedSum -= inc.Prices[popIndex] * inc.Volumes[popIndex] + inc.VolumeSum -= inc.Volumes[popIndex] + } + + inc.WeightedSum += price * volume + inc.VolumeSum += volume + + vwap := inc.WeightedSum / inc.VolumeSum + inc.Values.Push(vwap) +} + +func (inc *VWAP) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *VWAP) Index(i int) float64 { + length := len(inc.Values) + if length == 0 || length-i-1 < 0 { + return 0 + } + + return inc.Values[length-i-1] +} + +func (inc *VWAP) Length() int { + return len(inc.Values) +} + +var _ types.Series = &VWAP{} + +func (inc *VWAP) calculateAndUpdate(kLines []types.KLine) { + var priceF = KLineTypicalPriceMapper + + for _, k := range kLines { + if inc.EndTime != zeroTime && !k.EndTime.After(inc.EndTime) { + continue + } + inc.Update(priceF(k), k.Volume.Float64()) + } + + inc.EmitUpdate(inc.Last()) + inc.EndTime = kLines[len(kLines)-1].EndTime.Time() +} + +func (inc *VWAP) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *VWAP) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func CalculateVWAP(klines []types.KLine, priceF KLinePriceMapper, window int) float64 { + vwap := VWAP{IntervalWindow: types.IntervalWindow{Window: window}} + for _, k := range klines { + vwap.Update(priceF(k), k.Volume.Float64()) + } + return vwap.Last() +} diff --git a/pkg/indicator/vwap_callbacks.go b/pkg/indicator/vwap_callbacks.go new file mode 100644 index 0000000000..9a235d17ae --- /dev/null +++ b/pkg/indicator/vwap_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type VWAP"; DO NOT EDIT. 
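A small, hypothetical example of the exported CalculateVWAP helper above, using the typical-price mapper; the numbers are invented, and fixedpoint.NewFromFloat is assumed to be the usual constructor for fixedpoint.Value literals.

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/fixedpoint"
	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	klines := []types.KLine{
		// typical price (H+L+C)/3 = 100, volume 2
		{High: fixedpoint.NewFromFloat(101), Low: fixedpoint.NewFromFloat(99), Close: fixedpoint.NewFromFloat(100), Volume: fixedpoint.NewFromFloat(2)},
		// typical price = 102, volume 1
		{High: fixedpoint.NewFromFloat(103), Low: fixedpoint.NewFromFloat(101), Close: fixedpoint.NewFromFloat(102), Volume: fixedpoint.NewFromFloat(1)},
	}
	// window 0 means the whole range, as in the tests below
	vwap := indicator.CalculateVWAP(klines, indicator.KLineTypicalPriceMapper, 0)
	fmt.Println(vwap) // (100*2 + 102*1) / (2+1) ≈ 100.67
}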
+ +package indicator + +import () + +func (V *VWAP) OnUpdate(cb func(value float64)) { + V.UpdateCallbacks = append(V.UpdateCallbacks, cb) +} + +func (V *VWAP) EmitUpdate(value float64) { + for _, cb := range V.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/vwap_test.go b/pkg/indicator/vwap_test.go new file mode 100644 index 0000000000..d168bb938f --- /dev/null +++ b/pkg/indicator/vwap_test.go @@ -0,0 +1,74 @@ +package indicator + +import ( + "encoding/json" + "math" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func Test_calculateVWAP(t *testing.T) { + var trivialPrices = []byte(`[0]`) + var trivialVolumes = []byte(`[1]`) + var easyPrices = []byte(`[1, 2, 3]`) + var easyVolumes = []byte(`[4, 5, 6]`) + var windowPrices = []byte(`[1, 2, 3, 4]`) + var windowVolumes = []byte(`[4, 5, 6, 7]`) + var randomPrices = []byte(`[0.6046702879796195, 0.9405190880450124, 0.6645700532184904, 0.4377241871869802, 0.4246474970712657, 0.6868330728671094, 0.06564701921747622, 0.15652925473279125, 0.09697951891448456, 0.3009218605852871]`) + var randomVolumes = []byte(`[0.5152226285020653, 0.8136499609900968, 0.21427387258237493, 0.380667189299686, 0.31806817433032986, 0.4688998449024232, 0.2830441511804452, 0.2931118573368158, 0.6790946759202162, 0.2185630525927643]`) + + buildKLines := func(pb, vb []byte) (kLines []types.KLine) { + var prices, volumes []fixedpoint.Value + _ = json.Unmarshal(pb, &prices) + _ = json.Unmarshal(vb, &volumes) + for i, p := range prices { + kLines = append(kLines, types.KLine{High: p, Low: p, Close: p, Volume: volumes[i]}) + } + return kLines + } + + tests := []struct { + name string + kLines []types.KLine + window int + want float64 + }{ + { + name: "trivial_case", + kLines: buildKLines(trivialPrices, trivialVolumes), + window: 0, + want: 0.0, + }, + { + name: "easy_case", + kLines: buildKLines(easyPrices, easyVolumes), + window: 0, + want: (1*4 + 2*5 + 3*6) / float64(4+5+6), + }, + { + name: "window_case", + kLines: buildKLines(windowPrices, windowVolumes), + window: 3, + want: (2*5 + 3*6 + 4*7) / float64(5+6+7), + }, + { + name: "random_case", + kLines: buildKLines(randomPrices, randomVolumes), + window: 0, + want: 0.48727133857423566, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + priceF := KLineTypicalPriceMapper + got := CalculateVWAP(tt.kLines, priceF, tt.window) + diff := math.Trunc((got-tt.want)*100) / 100 + if diff != 0 { + t.Errorf("calculateVWAP() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/indicator/vwma.go b/pkg/indicator/vwma.go new file mode 100644 index 0000000000..131e2f5df0 --- /dev/null +++ b/pkg/indicator/vwma.go @@ -0,0 +1,106 @@ +package indicator + +import ( + "time" + + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/types" +) + +/* +vwma implements the volume weighted moving average (VWMA) indicator: + +Calculation: + pv = element-wise multiplication of close prices and volumes + VWMA = SMA(pv, window) / SMA(volumes, window) + +Volume Weighted Moving Average +- https://www.motivewave.com/studies/volume_weighted_moving_average.htm +*/ +//go:generate callbackgen -type VWMA +type VWMA struct { + types.IntervalWindow + Values types.Float64Slice + EndTime time.Time + + UpdateCallbacks []func(value float64) +} + +func (inc *VWMA) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *VWMA) Index(i int) float64 { + length := len(inc.Values) + if length == 0 || length-i-1 < 
0 { + return 0 + } + return inc.Values[length-i-1] +} + +func (inc *VWMA) Length() int { + return len(inc.Values) +} + +var _ types.Series = &VWMA{} + +func KLinePriceVolumeMapper(k types.KLine) float64 { + return k.Close.Mul(k.Volume).Float64() +} + +func KLineVolumeMapper(k types.KLine) float64 { + return k.Volume.Float64() +} + +func (inc *VWMA) calculateAndUpdate(kLines []types.KLine) { + if len(kLines) < inc.Window { + return + } + + var index = len(kLines) - 1 + var kline = kLines[index] + + if inc.EndTime != zeroTime && kline.EndTime.Before(inc.EndTime) { + return + } + + var recentK = kLines[index-(inc.Window-1) : index+1] + + pv, err := calculateSMA(recentK, inc.Window, KLinePriceVolumeMapper) + if err != nil { + log.WithError(err).Error("price x volume SMA error") + return + } + v, err := calculateSMA(recentK, inc.Window, KLineVolumeMapper) + if err != nil { + log.WithError(err).Error("volume SMA error") + return + } + + vwma := pv / v + inc.Values.Push(vwma) + + if len(inc.Values) > MaxNumOfSMA { + inc.Values = inc.Values[MaxNumOfSMATruncateSize-1:] + } + + inc.EndTime = kLines[index].EndTime.Time() + + inc.EmitUpdate(vwma) +} + +func (inc *VWMA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *VWMA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/vwma_callbacks.go b/pkg/indicator/vwma_callbacks.go new file mode 100644 index 0000000000..5be9f70f01 --- /dev/null +++ b/pkg/indicator/vwma_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type VWMA"; DO NOT EDIT. + +package indicator + +import () + +func (inc *VWMA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *VWMA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/wwma.go b/pkg/indicator/wwma.go new file mode 100644 index 0000000000..13fd1b8d19 --- /dev/null +++ b/pkg/indicator/wwma.go @@ -0,0 +1,88 @@ +package indicator + +import ( + "github.com/c9s/bbgo/pkg/types" + "time" +) + +// Refer: Welles Wilder's Moving Average +// Refer URL: http://fxcorporate.com/help/MS/NOTFIFO/i_WMA.html +// TODO: Cannot see any difference between RMA and this + +const MaxNumOfWWMA = 5_000 +const MaxNumOfWWMATruncateSize = 100 + +//go:generate callbackgen -type WWMA +type WWMA struct { + types.IntervalWindow + Values types.Float64Slice + LastOpenTime time.Time + + UpdateCallbacks []func(value float64) +} + +func (inc *WWMA) Update(value float64) { + if len(inc.Values) == 0 { + inc.Values.Push(value) + return + } else if len(inc.Values) > MaxNumOfWWMA { + inc.Values = inc.Values[MaxNumOfWWMATruncateSize-1:] + } + + last := inc.Last() + wma := last + (value-last)/float64(inc.Window) + inc.Values.Push(wma) +} + +func (inc *WWMA) Last() float64 { + if len(inc.Values) == 0 { + return 0 + } + + return inc.Values[len(inc.Values)-1] +} + +func (inc *WWMA) Index(i int) float64 { + if i >= len(inc.Values) { + return 0 + } + + return inc.Values[len(inc.Values)-1-i] +} + +func (inc *WWMA) Length() int { + return len(inc.Values) +} + +func (inc *WWMA) calculateAndUpdate(allKLines []types.KLine) { + if len(allKLines) < inc.Window { + // we can't calculate + return + } + + doable := false + for _, k := range allKLines { + if !doable && k.StartTime.After(inc.LastOpenTime) { + doable = true + } + if doable { + 
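// Wilder smoothing: each Update moves the average by (close - last) / Window.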
inc.Update(k.Close.Float64()) + inc.LastOpenTime = k.StartTime.Time() + inc.EmitUpdate(inc.Last()) + } + } +} + +func (inc *WWMA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *WWMA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +var _ types.Series = &WWMA{} diff --git a/pkg/indicator/wwma_callbacks.go b/pkg/indicator/wwma_callbacks.go new file mode 100644 index 0000000000..2c5f57b18e --- /dev/null +++ b/pkg/indicator/wwma_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type WWMA"; DO NOT EDIT. + +package indicator + +import () + +func (inc *WWMA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *WWMA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/zlema.go b/pkg/indicator/zlema.go new file mode 100644 index 0000000000..4ed97d84ac --- /dev/null +++ b/pkg/indicator/zlema.go @@ -0,0 +1,82 @@ +package indicator + +import ( + "github.com/c9s/bbgo/pkg/types" +) + +// Refer: Zero Lag Exponential Moving Average +// Refer URL: https://en.wikipedia.org/wiki/Zero_lag_exponential_moving_average + +//go:generate callbackgen -type ZLEMA +type ZLEMA struct { + types.IntervalWindow + + data types.Float64Slice + zlema *EWMA + lag int + + UpdateCallbacks []func(value float64) +} + +func (inc *ZLEMA) Index(i int) float64 { + if inc.zlema == nil { + return 0 + } + return inc.zlema.Index(i) +} + +func (inc *ZLEMA) Last() float64 { + if inc.zlema == nil { + return 0 + } + return inc.zlema.Last() +} + +func (inc *ZLEMA) Length() int { + if inc.zlema == nil { + return 0 + } + return inc.zlema.Length() +} + +func (inc *ZLEMA) Update(value float64) { + if inc.lag == 0 || inc.zlema == nil { + inc.zlema = &EWMA{IntervalWindow: types.IntervalWindow{inc.Interval, inc.Window}} + inc.lag = int((float64(inc.Window)-1.)/2. + 0.5) + } + inc.data.Push(value) + if len(inc.data) > MaxNumOfEWMA { + inc.data = inc.data[MaxNumOfEWMATruncateSize-1:] + } + if inc.lag >= inc.data.Length() { + return + } + emaData := 2.*value - inc.data[len(inc.data)-1-inc.lag] + inc.zlema.Update(emaData) +} + +var _ types.Series = &ZLEMA{} + +func (inc *ZLEMA) calculateAndUpdate(allKLines []types.KLine) { + if inc.zlema == nil { + for _, k := range allKLines { + inc.Update(k.Close.Float64()) + inc.EmitUpdate(inc.Last()) + } + } else { + inc.Update(allKLines[len(allKLines)-1].Close.Float64()) + inc.EmitUpdate(inc.Last()) + } +} + +func (inc *ZLEMA) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *ZLEMA) Bind(updater KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} diff --git a/pkg/indicator/zlema_callbacks.go b/pkg/indicator/zlema_callbacks.go new file mode 100644 index 0000000000..d70147699f --- /dev/null +++ b/pkg/indicator/zlema_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type ZLEMA"; DO NOT EDIT. 
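An illustrative, hypothetical sketch of the ZLEMA defined above, using only the Update and Last methods from zlema.go; the ramp input is made up.

package main

import (
	"fmt"

	"github.com/c9s/bbgo/pkg/indicator"
	"github.com/c9s/bbgo/pkg/types"
)

func main() {
	// Window 16 gives lag = int((16-1)/2 + 0.5) = 8, so each sample feeds 2*p[t] - p[t-8] into the underlying EWMA.
	zlema := &indicator.ZLEMA{IntervalWindow: types.IntervalWindow{Window: 16}}
	for i := 0; i < 20; i++ {
		zlema.Update(float64(i)) // made-up ramp; the first `lag` samples only fill the buffer
	}
	fmt.Println(zlema.Last()) // de-lagged EMA of the ramp
}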
+ +package indicator + +import () + +func (inc *ZLEMA) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *ZLEMA) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/indicator/zlema_test.go b/pkg/indicator/zlema_test.go new file mode 100644 index 0000000000..4b0e546ab2 --- /dev/null +++ b/pkg/indicator/zlema_test.go @@ -0,0 +1,55 @@ +package indicator + +import ( + "encoding/json" + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +/* +python: + +import pandas as pd +s = pd.Series([0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9]) +lag = int((16-1)/2 + 0.5) +emadata = s + (s - s.shift(lag)) +result = emadata.ewm(span=16).mean() +print(result) +*/ +func Test_ZLEMA(t *testing.T) { + var Delta = 6.5e-2 + var randomPrices = []byte(`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]`) + var input []fixedpoint.Value + if err := json.Unmarshal(randomPrices, &input); err != nil { + panic(err) + } + tests := []struct { + name string + kLines []types.KLine + want float64 + next float64 + all int + }{ + { + name: "random_case", + kLines: buildKLines(input), + want: 6.622881, + next: 5.231044, + all: 42, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + zlema := ZLEMA{IntervalWindow: types.IntervalWindow{Window: 16}} + zlema.calculateAndUpdate(tt.kLines) + last := zlema.Last() + assert.InDelta(t, tt.want, last, Delta) + assert.InDelta(t, tt.next, zlema.Index(1), Delta) + assert.Equal(t, tt.all, zlema.Length()) + }) + } +} diff --git a/pkg/interact/auth.go b/pkg/interact/auth.go new file mode 100644 index 0000000000..343ff382a4 --- /dev/null +++ b/pkg/interact/auth.go @@ -0,0 +1,129 @@ +package interact + +import ( + "errors" + "os" + "time" + + "github.com/pquerna/otp" + "github.com/pquerna/otp/totp" + log "github.com/sirupsen/logrus" +) + +type AuthMode string + +const ( + AuthModeOTP AuthMode = "OTP" + AuthModeToken AuthMode = "TOKEN" +) + +var ErrAuthenticationFailed = errors.New("authentication failed") + +type Authorizer interface { + StartAuthorizing() + Authorize() error +} + +type AuthInteract struct { + Strict bool `json:"strict,omitempty"` + + Mode AuthMode `json:"authMode"` + + Token string `json:"authToken,omitempty"` + + OneTimePasswordKey *otp.Key `json:"otpKey,omitempty"` +} + +func (it *AuthInteract) Commands(interact *Interact) { + if it.Strict { + // generate a one-time-use otp + // pragma: allowlist nextline secret + if it.OneTimePasswordKey == nil { + opts := totp.GenerateOpts{ + Issuer: "interact", + AccountName: os.Getenv("USER"), + Period: 30, + } + log.Infof("[interact] one-time password key is not configured, generating one with %+v", opts) + key, err := totp.Generate(opts) + if err != nil { + panic(err) + } + // pragma: allowlist nextline secret + it.OneTimePasswordKey = key + } + interact.Command("/auth", "authorize", func(reply Reply, session Session) error { + reply.Message("Please enter your authentication token") + session.SetAuthorizing(true) + return nil + }).Next(func(token string, reply Reply) error { + if token == it.Token { + reply.Message("Token passed, please enter your one-time password") + + code, err := totp.GenerateCode(it.OneTimePasswordKey.Secret(), time.Now()) + if err != nil { + return err 
+ } + + log.Infof("[interact] ======================================") + log.Infof("[interact] your one-time password code: %s", code) + log.Infof("[interact] ======================================") + return nil + } + + return ErrAuthenticationFailed + }).NamedNext(StateAuthenticated, func(code string, reply Reply, session Session) error { + if totp.Validate(code, it.OneTimePasswordKey.Secret()) { + reply.Message("Great! You're authenticated!") + session.SetOriginState(StateAuthenticated) + session.SetAuthorized() + return nil + } + + reply.Message("Incorrect authentication code") + return ErrAuthenticationFailed + }) + } else { + interact.Command("/auth", "authorize", func(reply Reply, session Session) error { + switch it.Mode { + case AuthModeToken: + session.SetAuthorizing(true) + reply.Message("Enter your authentication token") + + case AuthModeOTP: + session.SetAuthorizing(true) + reply.Message("Enter your one-time password") + + default: + log.Warnf("unexpected auth mode: %s", it.Mode) + } + return nil + }).NamedNext(StateAuthenticated, func(code string, reply Reply, session Session) error { + switch it.Mode { + case AuthModeToken: + if code == it.Token { + reply.Message("Great! You're authenticated!") + session.SetOriginState(StateAuthenticated) + session.SetAuthorized() + return nil + } + reply.Message("Incorrect authentication token") + + case AuthModeOTP: + if totp.Validate(code, it.OneTimePasswordKey.Secret()) { + reply.Message("Great! You're authenticated!") + session.SetOriginState(StateAuthenticated) + session.SetAuthorized() + return nil + } + reply.Message("Incorrect one-time pass code") + + default: + log.Warnf("unexpected auth mode: %s", it.Mode) + } + + return ErrAuthenticationFailed + }) + } + +} diff --git a/pkg/interact/command.go b/pkg/interact/command.go new file mode 100644 index 0000000000..9155730aad --- /dev/null +++ b/pkg/interact/command.go @@ -0,0 +1,89 @@ +package interact + +import "strconv" + +// Command is a domain specific language syntax helper +// It's used for helping developer define the state and transition function +type Command struct { + // Name is the command name + Name string + + // Desc is the command description + Desc string + + // StateF is the command handler function + F interface{} + + stateID int + states map[State]State + statesFunc map[State]interface{} + initState, lastState State +} + +func NewCommand(name, desc string, f interface{}) *Command { + c := &Command{ + Name: name, + Desc: desc, + F: f, + states: make(map[State]State), + statesFunc: make(map[State]interface{}), + initState: State(name + "_" + strconv.Itoa(0)), + } + return c.Next(f) +} + +// Transit defines the state transition that is not related to the last defined state. 
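// A hypothetical sketch (command, handler and state names are invented for illustration):
//
//	cmd := NewCommand("/close", "close a position", askSymbol).Next(askConfirm)
//	// jump from the confirmation state straight back to the initial state on cancel
//	cmd.Transit(State("/close_2"), State("/close_0"), handleCancel)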
+func (c *Command) Transit(state1, state2 State, f interface{}) *Command { + c.states[state1] = state2 + c.statesFunc[state1] = f + return c +} + +func (c *Command) NamedNext(n State, f interface{}) *Command { + var curState State + if c.lastState == "" { + curState = State(c.Name + "_" + strconv.Itoa(c.stateID)) + } else { + curState = c.lastState + } + + nextState := n + c.states[curState] = nextState + c.statesFunc[curState] = f + c.lastState = nextState + return c +} + +func (c *Command) Cycle(f interface{}) *Command { + var curState State + if c.lastState == "" { + curState = State(c.Name + "_" + strconv.Itoa(c.stateID)) + } else { + curState = c.lastState + } + + nextState := curState + c.states[curState] = nextState + c.statesFunc[curState] = f + c.lastState = nextState + return c +} + +// Next defines the next state with the transition function from the last defined state. +func (c *Command) Next(f interface{}) *Command { + var curState State + if c.lastState == "" { + curState = State(c.Name + "_" + strconv.Itoa(c.stateID)) + } else { + curState = c.lastState + } + + // generate the next state by the stateID + c.stateID++ + nextState := State(c.Name + "_" + strconv.Itoa(c.stateID)) + + c.states[curState] = nextState + c.statesFunc[curState] = f + c.lastState = nextState + return c +} diff --git a/pkg/interact/default.go b/pkg/interact/default.go new file mode 100644 index 0000000000..4174122e6e --- /dev/null +++ b/pkg/interact/default.go @@ -0,0 +1,21 @@ +package interact + +import "context" + +var defaultInteraction = New() + +func Default() *Interact { + return defaultInteraction +} + +func AddMessenger(messenger Messenger) { + defaultInteraction.AddMessenger(messenger) +} + +func AddCustomInteraction(custom CustomInteraction) { + defaultInteraction.AddCustomInteraction(custom) +} + +func Start(ctx context.Context) error { + return defaultInteraction.Start(ctx) +} diff --git a/pkg/interact/interact.go b/pkg/interact/interact.go new file mode 100644 index 0000000000..a19b562103 --- /dev/null +++ b/pkg/interact/interact.go @@ -0,0 +1,264 @@ +package interact + +import ( + "context" + "fmt" + "time" + + log "github.com/sirupsen/logrus" +) + +type CustomInteraction interface { + Commands(interact *Interact) +} + +type Initializer interface { + Initialize() error +} + +type Messenger interface { + TextMessageResponder + CommandResponder + Start(ctx context.Context) +} + +type Session interface { + ID() string + SetOriginState(state State) + GetOriginState() State + SetState(state State) + GetState() State + IsAuthorized() bool + SetAuthorized() + SetAuthorizing(b bool) +} + +// Interact implements the interaction between bot and message software. 
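// A hypothetical usage sketch (the messenger value, context and handler are invented):
//
//	it := New()
//	it.AddMessenger(telegramMessenger) // any Messenger implementation
//	it.Command("/ping", "health check", func(reply Reply) error {
//		reply.Message("pong")
//		return nil
//	})
//	_ = it.Start(ctx)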
+type Interact struct { + startTime time.Time + + // commands is the default public command map + commands map[string]*Command + + // privateCommands is the private command map, need auth + privateCommands map[string]*Command + + states map[State]State + statesFunc map[State]interface{} + + customInteractions []CustomInteraction + + messengers []Messenger +} + +func New() *Interact { + return &Interact{ + startTime: time.Now(), + commands: make(map[string]*Command), + privateCommands: make(map[string]*Command), + states: make(map[State]State), + statesFunc: make(map[State]interface{}), + } +} + +func (it *Interact) AddCustomInteraction(custom CustomInteraction) { + custom.Commands(it) + it.customInteractions = append(it.customInteractions, custom) +} + +func (it *Interact) PrivateCommand(command, desc string, f interface{}) *Command { + cmd := NewCommand(command, desc, f) + it.privateCommands[command] = cmd + return cmd +} + +func (it *Interact) Command(command string, desc string, f interface{}) *Command { + cmd := NewCommand(command, desc, f) + it.commands[command] = cmd + return cmd +} + +func (it *Interact) getNextState(session Session, currentState State) (nextState State, final bool) { + var ok bool + final = false + nextState, ok = it.states[currentState] + if ok { + // check if it's the final state + if _, hasTransition := it.statesFunc[nextState]; !hasTransition { + final = true + } + + return nextState, final + } + + // state not found, return to the origin state + return session.GetOriginState(), final +} + +func (it *Interact) handleResponse(session Session, text string, ctxObjects ...interface{}) error { + // We only need response when executing a command + switch session.GetState() { + case StatePublic, StateAuthenticated: + return nil + + } + + args := parseCommand(text) + + state := session.GetState() + f, ok := it.statesFunc[state] + if !ok { + return fmt.Errorf("state function of %s is not defined", state) + } + + ctxObjects = append(ctxObjects, session) + _, err := parseFuncArgsAndCall(f, args, ctxObjects...) + if err != nil { + return err + } + + nextState, end := it.getNextState(session, state) + if end { + session.SetState(session.GetOriginState()) + return nil + } + + session.SetState(nextState) + return nil +} + +func (it *Interact) getCommand(session Session, command string) (*Command, error) { + if session.IsAuthorized() { + if cmd, ok := it.privateCommands[command]; ok { + return cmd, nil + } + } else { + if _, ok := it.privateCommands[command]; ok { + return nil, fmt.Errorf("private command can not be executed in the public mode, type /auth to get authorized") + } + } + + // find any public command + if cmd, ok := it.commands[command]; ok { + return cmd, nil + } + + return nil, fmt.Errorf("command %s not found", command) +} + +func (it *Interact) runCommand(session Session, command string, args []string, ctxObjects ...interface{}) error { + cmd, err := it.getCommand(session, command) + if err != nil { + return err + } + + ctxObjects = append(ctxObjects, session) + session.SetState(cmd.initState) + if _, err := parseFuncArgsAndCall(cmd.F, args, ctxObjects...); err != nil { + return err + } + + // if we can successfully execute the command, then we can go to the next state. 
+ state := session.GetState() + nextState, end := it.getNextState(session, state) + if end { + session.SetState(session.GetOriginState()) + return nil + } + + session.SetState(nextState) + return nil +} + +func (it *Interact) AddMessenger(messenger Messenger) { + // pass Responder function + messenger.SetTextMessageResponder(func(session Session, message string, reply Reply, ctxObjects ...interface{}) error { + return it.handleResponse(session, message, append(ctxObjects, reply)...) + }) + it.messengers = append(it.messengers, messenger) +} + +// builtin initializes the built-in commands +func (it *Interact) builtin() error { + it.Command("/uptime", "show bot uptime", func(reply Reply) error { + uptime := time.Since(it.startTime) + reply.Message(fmt.Sprintf("uptime %s", uptime)) + return nil + }) + + return nil +} + +func (it *Interact) init() error { + + if err := it.builtin(); err != nil { + return err + } + + if err := it.registerCommands(it.commands); err != nil { + return err + } + + if err := it.registerCommands(it.privateCommands); err != nil { + return err + } + + return nil +} + +func (it *Interact) registerCommands(commands map[string]*Command) error { + for n, cmd := range commands { + for s1, s2 := range cmd.states { + if _, exist := it.states[s1]; exist { + return fmt.Errorf("state %s already exists", s1) + } + + it.states[s1] = s2 + } + for s, f := range cmd.statesFunc { + it.statesFunc[s] = f + } + + // register commands to the service + if len(it.messengers) == 0 { + return fmt.Errorf("messenger is not set") + } + + // commandName is used in the closure, we need to copy the variable + commandName := n + for _, messenger := range it.messengers { + messenger.AddCommand(cmd, func(session Session, message string, reply Reply, ctxObjects ...interface{}) error { + args := parseCommand(message) + return it.runCommand(session, commandName, args, append(ctxObjects, reply)...) 
+ }) + } + } + return nil +} + +func (it *Interact) Start(ctx context.Context) error { + if len(it.messengers) == 0 { + log.Warn("messenger is not set, skip initializing") + return nil + } + + if err := it.init(); err != nil { + return err + } + + for _, custom := range it.customInteractions { + log.Infof("checking %T custom interaction...", custom) + if initializer, ok := custom.(Initializer); ok { + log.Infof("initializing %T custom interaction...", custom) + if err := initializer.Initialize(); err != nil { + return err + } + } + } + + // TODO: use go routine and context + for _, m := range it.messengers { + go m.Start(ctx) + } + return nil +} diff --git a/pkg/interact/interact_test.go b/pkg/interact/interact_test.go new file mode 100644 index 0000000000..bd08282405 --- /dev/null +++ b/pkg/interact/interact_test.go @@ -0,0 +1,143 @@ +package interact + +import ( + "bytes" + "errors" + "io" + "testing" + + "github.com/stretchr/testify/assert" + tb "gopkg.in/tucnak/telebot.v2" +) + +func Test_parseFuncArgsAndCall_NoErrorFunction(t *testing.T) { + noErrorFunc := func(a string, b float64, c bool) error { + assert.Equal(t, "BTCUSDT", a) + assert.Equal(t, 0.123, b) + assert.Equal(t, true, c) + return nil + } + + _, err := parseFuncArgsAndCall(noErrorFunc, []string{"BTCUSDT", "0.123", "true"}) + assert.NoError(t, err) +} + +func Test_parseFuncArgsAndCall_ErrorFunction(t *testing.T) { + errorFunc := func(a string, b float64) error { + return errors.New("error") + } + + _, err := parseFuncArgsAndCall(errorFunc, []string{"BTCUSDT", "0.123"}) + assert.Error(t, err) +} + +func Test_parseFuncArgsAndCall_InterfaceInjection(t *testing.T) { + f := func(w io.Writer, a string, b float64) error { + _, err := w.Write([]byte("123")) + return err + } + + buf := bytes.NewBuffer(nil) + _, err := parseFuncArgsAndCall(f, []string{"BTCUSDT", "0.123"}, buf) + assert.NoError(t, err) + assert.Equal(t, "123", buf.String()) +} + +func Test_parseCommand(t *testing.T) { + args := parseCommand(`closePosition "BTC USDT" 3.1415926 market`) + t.Logf("args: %+v", args) + for i, a := range args { + t.Logf("args(%d): %#v", i, a) + } + + assert.Equal(t, 4, len(args)) + assert.Equal(t, "closePosition", args[0]) + assert.Equal(t, "BTC USDT", args[1]) + assert.Equal(t, "3.1415926", args[2]) + assert.Equal(t, "market", args[3]) +} + +type closePositionTask struct { + symbol string + percentage float64 + confirmed bool +} + +type TestInteraction struct { + closePositionTask closePositionTask +} + +func (m *TestInteraction) Commands(interact *Interact) { + interact.Command("/closePosition", "", func(reply Reply) error { + // send symbol options + return nil + }).Next(func(symbol string) error { + // get symbol from user + m.closePositionTask.symbol = symbol + + // send percentage options + return nil + }).Next(func(percentage float64) error { + // get percentage from user + m.closePositionTask.percentage = percentage + + // send confirmation + return nil + }).Next(func(confirmed bool) error { + m.closePositionTask.confirmed = confirmed + // call position close + + // reply result + return nil + }) +} + +func TestCustomInteraction(t *testing.T) { + b, err := tb.NewBot(tb.Settings{ + Offline: true, + }) + if !assert.NoError(t, err, "should have bot setup without error") { + return + } + + globalInteraction := New() + + telegram := &Telegram{ + Bot: b, + } + globalInteraction.AddMessenger(telegram) + + testInteraction := &TestInteraction{} + testInteraction.Commands(globalInteraction) + + err = globalInteraction.init() + 
assert.NoError(t, err) + + m := &tb.Message{ + Chat: &tb.Chat{ID: 22}, + Sender: &tb.User{ID: 999}, + } + session := telegram.loadSession(m) + err = globalInteraction.runCommand(session, "/closePosition", []string{}, telegram.newReply(session)) + assert.NoError(t, err) + + assert.Equal(t, State("/closePosition_1"), session.CurrentState) + + err = globalInteraction.handleResponse(session, "BTCUSDT", telegram.newReply(session)) + assert.NoError(t, err) + assert.Equal(t, State("/closePosition_2"), session.CurrentState) + + err = globalInteraction.handleResponse(session, "0.20", telegram.newReply(session)) + assert.NoError(t, err) + assert.Equal(t, State("/closePosition_3"), session.CurrentState) + + err = globalInteraction.handleResponse(session, "true", telegram.newReply(session)) + assert.NoError(t, err) + assert.Equal(t, State("public"), session.CurrentState) + + assert.Equal(t, closePositionTask{ + symbol: "BTCUSDT", + percentage: 0.2, + confirmed: true, + }, testInteraction.closePositionTask) +} diff --git a/pkg/interact/parse.go b/pkg/interact/parse.go new file mode 100644 index 0000000000..db4f3d1fd1 --- /dev/null +++ b/pkg/interact/parse.go @@ -0,0 +1,136 @@ +package interact + +import ( + "reflect" + "strconv" + "strings" + "text/scanner" + + "github.com/mattn/go-shellwords" + log "github.com/sirupsen/logrus" +) + +func parseFuncArgsAndCall(f interface{}, args []string, objects ...interface{}) (State, error) { + fv := reflect.ValueOf(f) + ft := reflect.TypeOf(f) + + argIndex := 0 + + var rArgs []reflect.Value + for i := 0; i < ft.NumIn(); i++ { + at := ft.In(i) + + switch k := at.Kind(); k { + + case reflect.Interface: + found := false + + for oi := 0; oi < len(objects); oi++ { + obj := objects[oi] + objT := reflect.TypeOf(obj) + objV := reflect.ValueOf(obj) + + log.Debugln( + at.PkgPath(), + at.Name(), + objT, "implements", at, "=", objT.Implements(at), + ) + + if objT.Implements(at) { + found = true + rArgs = append(rArgs, objV) + break + } + } + + if !found { + v := reflect.Zero(at) + rArgs = append(rArgs, v) + } + + case reflect.String: + av := reflect.ValueOf(args[argIndex]) + rArgs = append(rArgs, av) + argIndex++ + + case reflect.Bool: + bv, err := strconv.ParseBool(args[argIndex]) + if err != nil { + return "", err + } + av := reflect.ValueOf(bv) + rArgs = append(rArgs, av) + argIndex++ + + case reflect.Int64: + nf, err := strconv.ParseInt(args[argIndex], 10, 64) + if err != nil { + return "", err + } + + av := reflect.ValueOf(nf) + rArgs = append(rArgs, av) + argIndex++ + + case reflect.Float64: + nf, err := strconv.ParseFloat(args[argIndex], 64) + if err != nil { + return "", err + } + + av := reflect.ValueOf(nf) + rArgs = append(rArgs, av) + argIndex++ + } + } + + out := fv.Call(rArgs) + if ft.NumOut() == 0 { + return "", nil + } + + // try to get the error object from the return value + var state State + var err error + for i := 0; i < ft.NumOut(); i++ { + outType := ft.Out(i) + switch outType.Kind() { + case reflect.String: + if outType.Name() == "State" { + state = State(out[i].String()) + } + + case reflect.Interface: + o := out[i].Interface() + switch ov := o.(type) { + case error: + err = ov + + } + + } + } + return state, err +} + +func parseCommand(src string) (args []string) { + var err error + args, err = shellwords.Parse(src) + if err == nil { + return args + } + + // fallback to go text/scanner + var s scanner.Scanner + s.Init(strings.NewReader(src)) + s.Filename = "command" + for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() { + text := s.TokenText() + 
if text[0] == '"' && text[len(text)-1] == '"' { + text, _ = strconv.Unquote(text) + } + args = append(args, text) + } + + return args +} diff --git a/pkg/interact/reply.go b/pkg/interact/reply.go new file mode 100644 index 0000000000..11ead2ff31 --- /dev/null +++ b/pkg/interact/reply.go @@ -0,0 +1,65 @@ +package interact + +type Button struct { + Text string + Name string + Value string +} + +type TextField struct { + // Name is the form field name + Name string + + // Label is the field label + Label string + + // PlaceHolder is the sample text in the text input + PlaceHolder string +} + +type Option struct { + // Name is the form field name + Name string + + // Label is the option label for display + Label string + + // Value is the option value + Value string +} + +type Reply interface { + // Send sends the message directly to the client's session + Send(message string) + + // Message sets the message to the reply + Message(message string) + + // AddButton adds the button to the reply + AddButton(text string, name, value string) + + // AddMultipleButtons adds multiple buttons to the reply + AddMultipleButtons(buttonsForm [][3]string) + + // Choose(prompt string, options ...Option) + // Confirm shows the confirm dialog or confirm button in the user interface + // Confirm(prompt string) +} + +// KeyboardController is used when messenger supports keyboard controls +type KeyboardController interface { + // RemoveKeyboard hides the keyboard from the client user interface + RemoveKeyboard() +} + +// ButtonReply can be used if your reply needs button user interface. +type ButtonReply interface { + // AddButton adds the button to the reply + AddButton(text string) +} + +// DialogReply can be used if your reply needs Dialog user interface +type DialogReply interface { + // AddButton adds the button to the reply + Dialog(title, text string, buttons []string) +} diff --git a/pkg/interact/responder.go b/pkg/interact/responder.go new file mode 100644 index 0000000000..1adfffe30c --- /dev/null +++ b/pkg/interact/responder.go @@ -0,0 +1,16 @@ +package interact + +// Responder defines the logic of responding the message +type Responder func(session Session, message string, reply Reply, ctxObjects ...interface{}) error + +type CallbackResponder interface { + SetCallbackResponder(responder Responder) +} + +type TextMessageResponder interface { + SetTextMessageResponder(responder Responder) +} + +type CommandResponder interface { + AddCommand(command *Command, responder Responder) +} diff --git a/pkg/interact/session.go b/pkg/interact/session.go new file mode 100644 index 0000000000..ae8f01e5e7 --- /dev/null +++ b/pkg/interact/session.go @@ -0,0 +1,47 @@ +package interact + +import ( + "time" + + log "github.com/sirupsen/logrus" +) + +type BaseSession struct { + OriginState State `json:"originState,omitempty"` + CurrentState State `json:"currentState,omitempty"` + Authorized bool `json:"authorized,omitempty"` + StartedTime time.Time `json:"startedTime,omitempty"` + + // authorizing -- the user started authorizing himself/herself, do not ignore the message + authorizing bool +} + +func (s *BaseSession) SetOriginState(state State) { + s.OriginState = state +} + +func (s *BaseSession) GetOriginState() State { + return s.OriginState +} + +func (s *BaseSession) SetState(state State) { + log.Infof("[interact] transiting state from %s -> %s", s.CurrentState, state) + s.CurrentState = state +} + +func (s *BaseSession) GetState() State { + return s.CurrentState +} + +func (s *BaseSession) SetAuthorized() { + 
s.Authorized = true + s.authorizing = false +} + +func (s *BaseSession) IsAuthorized() bool { + return s.Authorized +} + +func (s *BaseSession) SetAuthorizing(b bool) { + s.authorizing = b +} diff --git a/pkg/interact/slack.go b/pkg/interact/slack.go new file mode 100644 index 0000000000..b9bb3c625c --- /dev/null +++ b/pkg/interact/slack.go @@ -0,0 +1,551 @@ +package interact + +import ( + "context" + "encoding/json" + "fmt" + stdlog "log" + "os" + "time" + + "github.com/google/uuid" + log "github.com/sirupsen/logrus" + "github.com/slack-go/slack" + "github.com/slack-go/slack/slackevents" + "github.com/slack-go/slack/socketmode" + + "github.com/c9s/bbgo/pkg/util" +) + +type SlackReply struct { + // uuid is the unique id of this question + // can be used as the callback id + uuid string + + session *SlackSession + + client *slack.Client + + message string + + buttons []Button + + textInputModalViewRequest *slack.ModalViewRequest +} + +func (reply *SlackReply) Send(message string) { + cID, tsID, err := reply.client.PostMessage( + reply.session.ChannelID, + slack.MsgOptionText(message, false), + slack.MsgOptionAsUser(false), // Add this if you want that the bot would post message as a user, otherwise it will send response using the default slackbot + ) + if err != nil { + log.WithError(err).Errorf("slack post message error: channel=%s thread=%s", cID, tsID) + return + } +} + +func (reply *SlackReply) InputText(prompt string, textFields ...TextField) { + reply.message = prompt + reply.textInputModalViewRequest = generateTextInputModalRequest(prompt, prompt, textFields...) +} + +func (reply *SlackReply) Choose(prompt string, options ...Option) { +} + +func (reply *SlackReply) Message(message string) { + reply.message = message +} + +// RemoveKeyboard is not supported by Slack +func (reply *SlackReply) RemoveKeyboard() {} + +func (reply *SlackReply) AddButton(text string, name string, value string) { + reply.buttons = append(reply.buttons, Button{ + Text: text, + Name: name, + Value: value, + }) +} + +func (reply *SlackReply) AddMultipleButtons(buttonsForm [][3]string) { + for _, buttonForm := range buttonsForm { + reply.AddButton(buttonForm[0], buttonForm[1], buttonForm[2]) + } +} + +func (reply *SlackReply) build() interface{} { + // you should avoid using this modal view request, because it interrupts the interaction flow + // once we send the modal view request, we can't go back to the channel. 
+ // (we don't know which channel the user started the interaction) + if reply.textInputModalViewRequest != nil { + return reply.textInputModalViewRequest + } + + if len(reply.message) > 0 { + return reply.message + } + + var blocks slack.Blocks + blocks.BlockSet = append(blocks.BlockSet, slack.NewSectionBlock( + &slack.TextBlockObject{ + Type: slack.MarkdownType, + Text: reply.message, + }, + nil, // fields + nil, // accessory + slack.SectionBlockOptionBlockID(reply.uuid), + )) + + if len(reply.buttons) > 0 { + var buttons []slack.BlockElement + for _, btn := range reply.buttons { + actionID := reply.uuid + ":" + btn.Value + buttons = append(buttons, + slack.NewButtonBlockElement( + // action id should be unique + actionID, + btn.Value, + &slack.TextBlockObject{ + Type: slack.PlainTextType, + Text: btn.Text, + }, + ), + ) + } + blocks.BlockSet = append(blocks.BlockSet, slack.NewActionBlock(reply.uuid, buttons...)) + } + + return blocks +} + +type SlackSession struct { + BaseSession + + slack *Slack + ChannelID string + UserID string +} + +func NewSlackSession(slack *Slack, userID, channelID string) *SlackSession { + return &SlackSession{ + BaseSession: BaseSession{ + OriginState: StatePublic, + CurrentState: StatePublic, + Authorized: false, + authorizing: false, + + StartedTime: time.Now(), + }, + slack: slack, + UserID: userID, + ChannelID: channelID, + } +} + +func (s *SlackSession) ID() string { + return fmt.Sprintf("%s-%s", s.UserID, s.ChannelID) +} + +func (s *SlackSession) SetAuthorized() { + s.BaseSession.SetAuthorized() + s.slack.EmitAuthorized(s) +} + +type SlackSessionMap map[string]*SlackSession + +//go:generate callbackgen -type Slack +type Slack struct { + client *slack.Client + socket *socketmode.Client + + sessions SlackSessionMap + + commands map[string]*Command + commandResponders map[string]Responder + + // textMessageResponder is used for interact to register its message handler + textMessageResponder Responder + + authorizedCallbacks []func(userSession *SlackSession) + + eventsApiCallbacks []func(evt slackevents.EventsAPIEvent) +} + +func NewSlack(client *slack.Client) *Slack { + var opts = []socketmode.Option{ + socketmode.OptionLog( + stdlog.New(os.Stdout, "socketmode: ", + stdlog.Lshortfile|stdlog.LstdFlags)), + } + + if b, ok := util.GetEnvVarBool("DEBUG_SLACK"); ok { + opts = append(opts, socketmode.OptionDebug(b)) + } + + socket := socketmode.New(client, opts...) + return &Slack{ + client: client, + socket: socket, + sessions: make(SlackSessionMap), + commands: make(map[string]*Command), + commandResponders: make(map[string]Responder), + } +} + +func (s *Slack) SetTextMessageResponder(responder Responder) { + s.textMessageResponder = responder +} + +func (s *Slack) AddCommand(command *Command, responder Responder) { + if _, exists := s.commands[command.Name]; exists { + panic(fmt.Errorf("command %s already exists, can not be re-defined", command.Name)) + } + + s.commands[command.Name] = command + s.commandResponders[command.Name] = responder +} + +func (s *Slack) listen(ctx context.Context) { + for evt := range s.socket.Events { + log.Debugf("event: %+v", evt) + + switch evt.Type { + case socketmode.EventTypeConnecting: + log.Infof("connecting to slack with socket mode...") + + case socketmode.EventTypeConnectionError: + log.Infof("connection failed. 
retrying later...") + + case socketmode.EventTypeConnected: + log.Infof("connected to slack with socket mode.") + + case socketmode.EventTypeDisconnect: + log.Infof("slack socket mode disconnected") + + case socketmode.EventTypeEventsAPI: + eventsAPIEvent, ok := evt.Data.(slackevents.EventsAPIEvent) + if !ok { + log.Debugf("ignored %+v", evt) + continue + } + + log.Debugf("event received: %+v", eventsAPIEvent) + + // events api don't have response trigger, we can't set the response + s.socket.Ack(*evt.Request) + + s.EmitEventsApi(eventsAPIEvent) + + switch eventsAPIEvent.Type { + case slackevents.CallbackEvent: + innerEvent := eventsAPIEvent.InnerEvent + switch ev := innerEvent.Data.(type) { + case *slackevents.MessageEvent: + log.Debugf("message event: text=%+v", ev.Text) + + if len(ev.BotID) > 0 { + log.Debug("skip bot message") + continue + } + + session := s.loadSession(evt, ev.User, ev.Channel) + + if !session.authorizing && !session.Authorized { + log.Warn("[slack] session is not authorizing nor authorized, skipping message handler") + continue + } + + if s.textMessageResponder != nil { + reply := s.newReply(session) + if err := s.textMessageResponder(session, ev.Text, reply); err != nil { + log.WithError(err).Errorf("[slack] response handling error") + continue + } + + // build the response + response := reply.build() + + log.Debugln("response payload", toJson(response)) + switch response := response.(type) { + + case string: + _, _, err := s.client.PostMessage(ev.Channel, slack.MsgOptionText(response, false)) + if err != nil { + log.WithError(err).Error("failed posting plain text message") + } + case slack.Blocks: + _, _, err := s.client.PostMessage(ev.Channel, slack.MsgOptionBlocks(response.BlockSet...)) + if err != nil { + log.WithError(err).Error("failed posting blocks message") + } + + default: + log.Errorf("[slack] unexpected message type %T: %+v", response, response) + + } + } + + case *slackevents.AppMentionEvent: + log.Infof("app mention event: %+v", ev) + s.socket.Ack(*evt.Request) + + case *slackevents.MemberJoinedChannelEvent: + log.Infof("user %q joined to channel %q", ev.User, ev.Channel) + s.socket.Ack(*evt.Request) + } + default: + s.socket.Debugf("unsupported Events API event received") + } + + case socketmode.EventTypeInteractive: + callback, ok := evt.Data.(slack.InteractionCallback) + if !ok { + log.Debugf("ignored %+v", evt) + continue + } + + log.Debugf("interaction received: %+v", callback) + + var payload interface{} + + switch callback.Type { + case slack.InteractionTypeBlockActions: + // See https://api.slack.com/apis/connections/socket-implement#button + log.Debugf("InteractionTypeBlockActions: %+v", callback) + + case slack.InteractionTypeShortcut: + log.Debugf("InteractionTypeShortcut: %+v", callback) + + case slack.InteractionTypeViewSubmission: + + // See https://api.slack.com/apis/connections/socket-implement#modal + log.Debugf("[slack] InteractionTypeViewSubmission: %+v", callback) + var values = simplifyStateValues(callback.View.State) + + if len(values) > 1 { + log.Warnf("[slack] more than 1 values received from the modal view submission, the value choosen from the state values might be incorrect") + } + + log.Debugln(toJson(values)) + if inputValue, ok := takeOneValue(values); ok { + session := s.loadSession(evt, callback.User.ID, callback.Channel.ID) + + if !session.authorizing && !session.Authorized { + log.Warn("[slack] telegram is set to private mode, skipping message") + continue + } + + reply := s.newReply(session) + if 
s.textMessageResponder != nil { + if err := s.textMessageResponder(session, inputValue, reply); err != nil { + log.WithError(err).Errorf("[slack] response handling error") + continue + } + } + + // close the modal view by sending a null payload + s.socket.Ack(*evt.Request) + + // build the response + response := reply.build() + + log.Debugln("response payload", toJson(response)) + switch response := response.(type) { + + case string: + payload = map[string]interface{}{ + "blocks": []slack.Block{ + translateMessageToBlock(response), + }, + } + + case slack.Blocks: + payload = map[string]interface{}{ + "blocks": response.BlockSet, + } + default: + s.socket.Ack(*evt.Request, response) + } + } + + case slack.InteractionTypeDialogSubmission: + log.Debugf("[slack] InteractionTypeDialogSubmission: %+v", callback) + + default: + log.Debugf("[slack] unexpected callback type: %+v", callback) + + } + + s.socket.Ack(*evt.Request, payload) + + case socketmode.EventTypeHello: + log.Debugf("[slack] hello command received: %+v", evt) + + case socketmode.EventTypeSlashCommand: + slashCmd, ok := evt.Data.(slack.SlashCommand) + if !ok { + log.Debugf("[slack] ignored %+v", evt) + continue + } + + log.Debugf("[slack] slash command received: %+v", slashCmd) + responder, exists := s.commandResponders[slashCmd.Command] + if !exists { + log.Errorf("[slack] command %s does not exist", slashCmd.Command) + s.socket.Ack(*evt.Request) + continue + } + + session := s.loadSession(evt, slashCmd.UserID, slashCmd.ChannelID) + reply := s.newReply(session) + if err := responder(session, slashCmd.Text, reply); err != nil { + log.WithError(err).Errorf("[slack] responder returns error") + s.socket.Ack(*evt.Request) + continue + } + + payload := reply.build() + if payload == nil { + log.Warnf("[slack] reply returns nil payload") + // ack with empty payload + s.socket.Ack(*evt.Request) + continue + } + + switch o := payload.(type) { + + case string: + s.socket.Ack(*evt.Request, map[string]interface{}{ + "blocks": []slack.Block{ + translateMessageToBlock(o), + }, + }) + + case *slack.ModalViewRequest: + if resp, err := s.socket.OpenView(slashCmd.TriggerID, *o); err != nil { + log.WithError(err).Errorf("[slack] view open error, resp: %+v", resp) + } + s.socket.Ack(*evt.Request) + + case slack.Blocks: + s.socket.Ack(*evt.Request, map[string]interface{}{ + "blocks": o.BlockSet, + }) + default: + s.socket.Ack(*evt.Request, o) + } + + default: + log.Debugf("[slack] unexpected event type received: %s", evt.Type) + } + } +} + +func (s *Slack) loadSession(evt socketmode.Event, userID, channelID string) *SlackSession { + key := userID + "-" + channelID + if session, ok := s.sessions[key]; ok { + log.Infof("[slack] an existing session %q found, session: %+v", key, session) + return session + } + + session := NewSlackSession(s, userID, channelID) + s.sessions[key] = session + log.Infof("[slack] allocated a new session %q, session: %+v", key, session) + return session +} + +func (s *Slack) newReply(session *SlackSession) *SlackReply { + return &SlackReply{ + uuid: uuid.New().String(), + session: session, + } +} + +func (s *Slack) Start(ctx context.Context) { + go s.listen(ctx) + if err := s.socket.Run(); err != nil { + log.WithError(err).Errorf("slack socketmode error") + } +} + +// generateTextInputModalRequest generates a general slack modal view request with the given text fields +// see also https://api.slack.com/surfaces/modals/using#opening +func generateTextInputModalRequest(title string, prompt string, textFields ...TextField) 
*slack.ModalViewRequest { + // create a ModalViewRequest with a header and two inputs + titleText := slack.NewTextBlockObject("plain_text", title, false, false) + closeText := slack.NewTextBlockObject("plain_text", "Close", false, false) + submitText := slack.NewTextBlockObject("plain_text", "Submit", false, false) + + headerText := slack.NewTextBlockObject("mrkdwn", prompt, false, false) + headerSection := slack.NewSectionBlock(headerText, nil, nil) + + blocks := slack.Blocks{ + BlockSet: []slack.Block{ + headerSection, + }, + } + + for _, textField := range textFields { + labelObject := slack.NewTextBlockObject("plain_text", textField.Label, false, false) + placeHolderObject := slack.NewTextBlockObject("plain_text", textField.PlaceHolder, false, false) + textInputObject := slack.NewPlainTextInputBlockElement(placeHolderObject, textField.Name) + + // Notice that blockID is a unique identifier for a block + inputBlock := slack.NewInputBlock("block-"+textField.Name+"-"+uuid.NewString(), labelObject, textInputObject) + blocks.BlockSet = append(blocks.BlockSet, inputBlock) + } + + var modalRequest slack.ModalViewRequest + modalRequest.Type = slack.ViewType("modal") + modalRequest.Title = titleText + modalRequest.Close = closeText + modalRequest.Submit = submitText + modalRequest.Blocks = blocks + return &modalRequest +} + +// simplifyStateValues simplifies the multi-layer structured values into just name=value mapping +func simplifyStateValues(state *slack.ViewState) map[string]string { + var values = make(map[string]string) + + if state == nil { + return values + } + + for blockID, fields := range state.Values { + _ = blockID + for fieldName, fieldValues := range fields { + values[fieldName] = fieldValues.Value + } + } + return values +} + +func takeOneValue(values map[string]string) (string, bool) { + for _, v := range values { + return v, true + } + return "", false +} + +func toJson(v interface{}) string { + o, err := json.MarshalIndent(v, "", " ") + if err != nil { + log.WithError(err).Errorf("json marshal error") + return "" + } + return string(o) +} + +func translateMessageToBlock(message string) slack.Block { + return slack.NewSectionBlock( + &slack.TextBlockObject{ + Type: slack.MarkdownType, + Text: message, + }, + nil, // fields + nil, // accessory + // slack.SectionBlockOptionBlockID(reply.uuid), + ) +} diff --git a/pkg/interact/slack_callbacks.go b/pkg/interact/slack_callbacks.go new file mode 100644 index 0000000000..40460f3318 --- /dev/null +++ b/pkg/interact/slack_callbacks.go @@ -0,0 +1,27 @@ +// Code generated by "callbackgen -type Slack"; DO NOT EDIT. 
+ +package interact + +import ( + "github.com/slack-go/slack/slackevents" +) + +func (s *Slack) OnAuthorized(cb func(userSession *SlackSession)) { + s.authorizedCallbacks = append(s.authorizedCallbacks, cb) +} + +func (s *Slack) EmitAuthorized(userSession *SlackSession) { + for _, cb := range s.authorizedCallbacks { + cb(userSession) + } +} + +func (s *Slack) OnEventsApi(cb func(evt slackevents.EventsAPIEvent)) { + s.eventsApiCallbacks = append(s.eventsApiCallbacks, cb) +} + +func (s *Slack) EmitEventsApi(evt slackevents.EventsAPIEvent) { + for _, cb := range s.eventsApiCallbacks { + cb(evt) + } +} diff --git a/pkg/interact/state.go b/pkg/interact/state.go new file mode 100644 index 0000000000..3bf517db6f --- /dev/null +++ b/pkg/interact/state.go @@ -0,0 +1,8 @@ +package interact + +type State string + +const ( + StatePublic State = "public" + StateAuthenticated State = "authenticated" +) diff --git a/pkg/interact/telegram.go b/pkg/interact/telegram.go new file mode 100644 index 0000000000..464e9b763d --- /dev/null +++ b/pkg/interact/telegram.go @@ -0,0 +1,258 @@ +package interact + +import ( + "context" + "fmt" + "strings" + "time" + + log "github.com/sirupsen/logrus" + "gopkg.in/tucnak/telebot.v2" +) + +func init() { + // force interface type check + _ = Reply(&TelegramReply{}) +} + +type TelegramSessionMap map[int64]*TelegramSession + +type TelegramSession struct { + BaseSession + + telegram *Telegram + + User *telebot.User `json:"user"` + Chat *telebot.Chat `json:"chat"` +} + +func (s *TelegramSession) ID() string { + return fmt.Sprintf("telegram-%d-%d", s.User.ID, s.Chat.ID) +} + +func (s *TelegramSession) SetAuthorized() { + s.BaseSession.SetAuthorized() + s.telegram.EmitAuthorized(s) +} + +func NewTelegramSession(telegram *Telegram, message *telebot.Message) *TelegramSession { + return &TelegramSession{ + BaseSession: BaseSession{ + OriginState: StatePublic, + CurrentState: StatePublic, + Authorized: false, + authorizing: false, + + StartedTime: time.Now(), + }, + telegram: telegram, + User: message.Sender, + Chat: message.Chat, + } +} + +type TelegramReply struct { + bot *telebot.Bot + session *TelegramSession + + message string + menu *telebot.ReplyMarkup + buttons []telebot.Btn + set bool +} + +func (r *TelegramReply) Send(message string) { + checkSendErr(r.bot.Send(r.session.Chat, message)) +} + +func (r *TelegramReply) Message(message string) { + r.message = message + r.set = true +} + +func (r *TelegramReply) RemoveKeyboard() { + r.menu.ReplyKeyboardRemove = true + r.set = true +} + +func (r *TelegramReply) AddButton(text string, name string, value string) { + var button = r.menu.Text(text) + r.buttons = append(r.buttons, button) + r.set = true +} + +func (r *TelegramReply) AddMultipleButtons(buttonsForm [][3]string) { + for _, buttonForm := range buttonsForm { + r.AddButton(buttonForm[0], buttonForm[1], buttonForm[2]) + } +} + +func (r *TelegramReply) build() { + var rows []telebot.Row + for _, button := range r.buttons { + rows = append(rows, telebot.Row{ + button, + }) + } + r.menu.Reply(rows...) 
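+	// (each button above is laid out on its own row of the reply keyboard)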
+} + +//go:generate callbackgen -type Telegram +type Telegram struct { + Bot *telebot.Bot `json:"-"` + + // Private is used to protect the telegram bot, users not authenticated can not see messages or sending commands + Private bool `json:"private,omitempty"` + + sessions TelegramSessionMap + + // textMessageResponder is used for interact to register its message handler + textMessageResponder Responder + + callbackResponder CallbackResponder + + commands []*Command + + authorizedCallbacks []func(s *TelegramSession) +} + +func NewTelegram(bot *telebot.Bot) *Telegram { + return &Telegram{ + Bot: bot, + Private: true, + sessions: make(map[int64]*TelegramSession), + } +} + +func (tm *Telegram) SetCallbackResponder(responder CallbackResponder) { + tm.callbackResponder = responder +} + +func (tm *Telegram) SetTextMessageResponder(responder Responder) { + tm.textMessageResponder = responder +} + +func (tm *Telegram) Start(context.Context) { + tm.Bot.Handle(telebot.OnCallback, func(c *telebot.Callback) { + log.Infof("[telegram] onCallback: %+v", c) + }) + + tm.Bot.Handle(telebot.OnText, func(m *telebot.Message) { + log.Infof("[telegram] onText: %+v", m) + + session := tm.loadSession(m) + if tm.Private { + if !session.authorizing && !session.Authorized { + log.Warn("[telegram] telegram is set to private mode, skipping message") + return + } + } + + reply := tm.newReply(session) + if tm.textMessageResponder != nil { + if err := tm.textMessageResponder(session, m.Text, reply); err != nil { + log.WithError(err).Errorf("[telegram] response handling error") + } + } + + if reply.set { + reply.build() + checkSendErr(tm.Bot.Send(m.Chat, reply.message, reply.menu)) + } + }) + + var cmdList []telebot.Command + for _, cmd := range tm.commands { + if len(cmd.Desc) == 0 { + continue + } + + cmdList = append(cmdList, telebot.Command{ + Text: strings.ToLower(strings.TrimLeft(cmd.Name, "/")), + Description: cmd.Desc, + }) + } + if err := tm.Bot.SetCommands(cmdList); err != nil { + log.WithError(err).Errorf("[telegram] set commands error") + } + + tm.Bot.Start() +} + +func checkSendErr(m *telebot.Message, err error) { + if err != nil { + log.WithError(err).Errorf("[telegram] message send error") + } +} + +func (tm *Telegram) loadSession(m *telebot.Message) *TelegramSession { + if tm.sessions == nil { + tm.sessions = make(map[int64]*TelegramSession) + } + + session, ok := tm.sessions[m.Chat.ID] + if ok { + log.Infof("[telegram] loaded existing session: %+v", session) + return session + } + + session = NewTelegramSession(tm, m) + tm.sessions[m.Chat.ID] = session + + log.Infof("[telegram] allocated a new session: %+v", session) + return session +} + +func (tm *Telegram) AddCommand(cmd *Command, responder Responder) { + tm.commands = append(tm.commands, cmd) + tm.Bot.Handle(cmd.Name, func(m *telebot.Message) { + session := tm.loadSession(m) + reply := tm.newReply(session) + if err := responder(session, m.Payload, reply); err != nil { + log.WithError(err).Errorf("[telegram] responder error") + checkSendErr(tm.Bot.Send(m.Chat, fmt.Sprintf("error: %v", err))) + return + } + + // build up the response objects + if reply.set { + reply.build() + checkSendErr(tm.Bot.Send(m.Chat, reply.message, reply.menu)) + } + }) +} + +func (tm *Telegram) newReply(session *TelegramSession) *TelegramReply { + return &TelegramReply{ + bot: tm.Bot, + session: session, + menu: &telebot.ReplyMarkup{ResizeReplyKeyboard: true}, + } +} + +func (tm *Telegram) Sessions() TelegramSessionMap { + return tm.sessions +} + +func (tm *Telegram) 
RestoreSessions(sessions TelegramSessionMap) { + if len(sessions) == 0 { + return + } + + log.Infof("[telegram] restoring telegram %d sessions", len(sessions)) + tm.sessions = sessions + for _, session := range sessions { + if session.Chat == nil || session.User == nil { + continue + } + + // update telegram context reference + session.telegram = tm + + if session.IsAuthorized() { + if _, err := tm.Bot.Send(session.Chat, fmt.Sprintf("Hi %s, I'm back. Your telegram session is restored.", session.User.Username)); err != nil { + log.WithError(err).Error("[telegram] can not send telegram message") + } + } + } +} diff --git a/pkg/interact/telegram_callbacks.go b/pkg/interact/telegram_callbacks.go new file mode 100644 index 0000000000..bc3c15bcf1 --- /dev/null +++ b/pkg/interact/telegram_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type Telegram"; DO NOT EDIT. + +package interact + +import () + +func (tm *Telegram) OnAuthorized(cb func(s *TelegramSession)) { + tm.authorizedCallbacks = append(tm.authorizedCallbacks, cb) +} + +func (tm *Telegram) EmitAuthorized(s *TelegramSession) { + for _, cb := range tm.authorizedCallbacks { + cb(s) + } +} diff --git a/pkg/migrations/20200721225616_trades.go b/pkg/migrations/20200721225616_trades.go deleted file mode 100644 index 12f449bbd9..0000000000 --- a/pkg/migrations/20200721225616_trades.go +++ /dev/null @@ -1,33 +0,0 @@ -package migrations - -import ( - "context" - - "github.com/c9s/rockhopper" -) - -func init() { - rockhopper.AddMigration(upTrades, downTrades) -} - -func upTrades(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { - // This code is executed when the migration is applied. - - _, err = tx.ExecContext(ctx, "CREATE TABLE `trades`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `id` BIGINT UNSIGNED,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `symbol` VARCHAR(8) NOT NULL,\n `price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `quote_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `fee` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `fee_currency` VARCHAR(4) NOT NULL,\n `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_maker` BOOLEAN NOT NULL DEFAULT FALSE,\n `side` VARCHAR(4) NOT NULL DEFAULT '',\n `traded_at` DATETIME(3) NOT NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY `id` (`id`)\n);") - if err != nil { - return err - } - - return err -} - -func downTrades(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { - // This code is executed when the migration is rolled back. - - _, err = tx.ExecContext(ctx, "DROP TABLE `trades`;") - if err != nil { - return err - } - - return err -} diff --git a/pkg/migrations/20201211175751_fix_symbol_length.go b/pkg/migrations/20201211175751_fix_symbol_length.go deleted file mode 100644 index 6349d3763f..0000000000 --- a/pkg/migrations/20201211175751_fix_symbol_length.go +++ /dev/null @@ -1,43 +0,0 @@ -package migrations - -import ( - "context" - - "github.com/c9s/rockhopper" -) - -func init() { - rockhopper.AddMigration(upFixSymbolLength, downFixSymbolLength) -} - -func upFixSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { - // This code is executed when the migration is applied. 
- - _, err = tx.ExecContext(ctx, "ALTER TABLE trades MODIFY COLUMN symbol VARCHAR(9);") - if err != nil { - return err - } - - _, err = tx.ExecContext(ctx, "ALTER TABLE orders MODIFY COLUMN symbol VARCHAR(9);") - if err != nil { - return err - } - - return err -} - -func downFixSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { - // This code is executed when the migration is rolled back. - - _, err = tx.ExecContext(ctx, "ALTER TABLE trades MODIFY COLUMN symbol VARCHAR(8);") - if err != nil { - return err - } - - _, err = tx.ExecContext(ctx, "ALTER TABLE orders MODIFY COLUMN symbol VARCHAR(8);") - if err != nil { - return err - } - - return err -} diff --git a/pkg/migrations/20210118163847_fix_unique_index.go b/pkg/migrations/20210118163847_fix_unique_index.go deleted file mode 100644 index 8e31b59643..0000000000 --- a/pkg/migrations/20210118163847_fix_unique_index.go +++ /dev/null @@ -1,43 +0,0 @@ -package migrations - -import ( - "context" - - "github.com/c9s/rockhopper" -) - -func init() { - rockhopper.AddMigration(upFixUniqueIndex, downFixUniqueIndex) -} - -func upFixUniqueIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { - // This code is executed when the migration is applied. - - _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` DROP INDEX `id`;") - if err != nil { - return err - } - - _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD UNIQUE INDEX `id` (`exchange`,`symbol`, `side`, `id`);") - if err != nil { - return err - } - - return err -} - -func downFixUniqueIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { - // This code is executed when the migration is rolled back. - - _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` DROP INDEX `id`;") - if err != nil { - return err - } - - _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD UNIQUE INDEX `id` (`id`);") - if err != nil { - return err - } - - return err -} diff --git a/pkg/migrations/20210119232826_add_margin_columns.go b/pkg/migrations/20210119232826_add_margin_columns.go deleted file mode 100644 index 778876cad3..0000000000 --- a/pkg/migrations/20210119232826_add_margin_columns.go +++ /dev/null @@ -1,43 +0,0 @@ -package migrations - -import ( - "context" - - "github.com/c9s/rockhopper" -) - -func init() { - rockhopper.AddMigration(upAddMarginColumns, downAddMarginColumns) -} - -func upAddMarginColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { - // This code is executed when the migration is applied. - - _, err = tx.ExecContext(ctx, "ALTER TABLE `trades`\n ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE,\n ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE\n ;") - if err != nil { - return err - } - - _, err = tx.ExecContext(ctx, "ALTER TABLE `orders`\n ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE,\n ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE\n ;") - if err != nil { - return err - } - - return err -} - -func downAddMarginColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { - // This code is executed when the migration is rolled back. 
- - _, err = tx.ExecContext(ctx, "ALTER TABLE `trades`\n DROP COLUMN `is_margin`,\n DROP COLUMN `is_isolated`;") - if err != nil { - return err - } - - _, err = tx.ExecContext(ctx, "ALTER TABLE `orders`\n DROP COLUMN `is_margin`,\n DROP COLUMN `is_isolated`;") - if err != nil { - return err - } - - return err -} diff --git a/pkg/migrations/mysql/20200721225616_trades.go b/pkg/migrations/mysql/20200721225616_trades.go new file mode 100644 index 0000000000..1a40f3c32e --- /dev/null +++ b/pkg/migrations/mysql/20200721225616_trades.go @@ -0,0 +1,64 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upTrades, downTrades) + +} + +func upTrades(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `trades`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `id` BIGINT UNSIGNED,\n `order_id` BIGINT UNSIGNED NOT NULL,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `symbol` VARCHAR(20) NOT NULL,\n `price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `quote_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `fee` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `fee_currency` VARCHAR(10) NOT NULL,\n `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_maker` BOOLEAN NOT NULL DEFAULT FALSE,\n `side` VARCHAR(4) NOT NULL DEFAULT '',\n `traded_at` DATETIME(3) NOT NULL,\n `is_margin` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE,\n `strategy` VARCHAR(32) NULL,\n `pnl` DECIMAL NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY `id` (`exchange`, `symbol`, `side`, `id`)\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_symbol ON trades (exchange, symbol);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_symbol_fee_currency ON trades (exchange, symbol, fee_currency, traded_at);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_traded_at_symbol ON trades (exchange, traded_at, symbol);") + if err != nil { + return err + } + + return err +} + +func downTrades(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `trades`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol ON trades;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol_fee_currency ON trades;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX trades_traded_at_symbol ON trades;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20200819054742_trade_index.go b/pkg/migrations/mysql/20200819054742_trade_index.go new file mode 100644 index 0000000000..60ba7925b6 --- /dev/null +++ b/pkg/migrations/mysql/20200819054742_trade_index.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upTradeIndex, downTradeIndex) + +} + +func upTradeIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
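+	// no-op: the trade indexes are already created by the consolidated
+	// 20200721225616_trades migration above; SELECT 1 is presumably kept only so
+	// the migration version sequence stays intact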
+ + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downTradeIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/20201102222546_orders.go b/pkg/migrations/mysql/20201102222546_orders.go similarity index 63% rename from pkg/migrations/20201102222546_orders.go rename to pkg/migrations/mysql/20201102222546_orders.go index 04af541509..a7cca310a4 100644 --- a/pkg/migrations/20201102222546_orders.go +++ b/pkg/migrations/mysql/20201102222546_orders.go @@ -1,4 +1,4 @@ -package migrations +package mysql import ( "context" @@ -7,13 +7,24 @@ import ( ) func init() { - rockhopper.AddMigration(upOrders, downOrders) + AddMigration(upOrders, downOrders) + } func upOrders(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is applied. - _, err = tx.ExecContext(ctx, "CREATE TABLE `orders`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n -- order_id is the order id returned from the exchange\n `order_id` BIGINT UNSIGNED NOT NULL,\n `client_order_id` VARCHAR(42) NOT NULL DEFAULT '',\n `order_type` VARCHAR(16) NOT NULL,\n `symbol` VARCHAR(8) NOT NULL,\n `status` VARCHAR(12) NOT NULL,\n `time_in_force` VARCHAR(4) NOT NULL,\n `price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `stop_price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `executed_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL DEFAULT 0.0,\n `side` VARCHAR(4) NOT NULL DEFAULT '',\n `is_working` BOOL NOT NULL DEFAULT FALSE,\n `created_at` DATETIME(3) NOT NULL,\n `updated_at` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3),\n PRIMARY KEY (`gid`)\n);") + _, err = tx.ExecContext(ctx, "CREATE TABLE `orders`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n -- order_id is the order id returned from the exchange\n `order_id` BIGINT UNSIGNED NOT NULL,\n `client_order_id` VARCHAR(122) NOT NULL DEFAULT '',\n `order_type` VARCHAR(16) NOT NULL,\n `symbol` VARCHAR(20) NOT NULL,\n `status` VARCHAR(12) NOT NULL,\n `time_in_force` VARCHAR(4) NOT NULL,\n `price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `stop_price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `executed_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL DEFAULT 0.0,\n `side` VARCHAR(4) NOT NULL DEFAULT '',\n `is_working` BOOL NOT NULL DEFAULT FALSE,\n `created_at` DATETIME(3) NOT NULL,\n `updated_at` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3),\n `is_margin` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE,\n PRIMARY KEY (`gid`)\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX orders_symbol ON orders (exchange, symbol);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX orders_order_id ON orders (order_id, exchange);") if err != nil { return err } @@ -24,6 +35,16 @@ func upOrders(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { func downOrders(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is rolled back. 
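+	// drop the indexes created in upOrders before the table itself is dropped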
+ _, err = tx.ExecContext(ctx, "DROP INDEX orders_symbol ON orders;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX orders_order_id ON orders;") + if err != nil { + return err + } + _, err = tx.ExecContext(ctx, "DROP TABLE `orders`;") if err != nil { return err diff --git a/pkg/migrations/20201103173342_trades_add_order_id.go b/pkg/migrations/mysql/20201103173342_trades_add_order_id.go similarity index 62% rename from pkg/migrations/20201103173342_trades_add_order_id.go rename to pkg/migrations/mysql/20201103173342_trades_add_order_id.go index 596624746d..e9c8c24da4 100644 --- a/pkg/migrations/20201103173342_trades_add_order_id.go +++ b/pkg/migrations/mysql/20201103173342_trades_add_order_id.go @@ -1,4 +1,4 @@ -package migrations +package mysql import ( "context" @@ -7,13 +7,14 @@ import ( ) func init() { - rockhopper.AddMigration(upTradesAddOrderId, downTradesAddOrderId) + AddMigration(upTradesAddOrderId, downTradesAddOrderId) + } func upTradesAddOrderId(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is applied. - _, err = tx.ExecContext(ctx, "ALTER TABLE `trades`\n ADD COLUMN `order_id` BIGINT UNSIGNED NOT NULL;") + _, err = tx.ExecContext(ctx, "SELECT 1;") if err != nil { return err } @@ -24,7 +25,7 @@ func upTradesAddOrderId(ctx context.Context, tx rockhopper.SQLExecutor) (err err func downTradesAddOrderId(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is rolled back. - _, err = tx.ExecContext(ctx, "ALTER TABLE `trades`\n DROP COLUMN `order_id`;") + _, err = tx.ExecContext(ctx, "SELECT 1;") if err != nil { return err } diff --git a/pkg/migrations/mysql/20201105092857_trades_index_fix.go b/pkg/migrations/mysql/20201105092857_trades_index_fix.go new file mode 100644 index 0000000000..fdbea09a6f --- /dev/null +++ b/pkg/migrations/mysql/20201105092857_trades_index_fix.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upTradesIndexFix, downTradesIndexFix) + +} + +func upTradesIndexFix(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downTradesIndexFix(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20201105093056_orders_add_index.go b/pkg/migrations/mysql/20201105093056_orders_add_index.go new file mode 100644 index 0000000000..7dec0d8056 --- /dev/null +++ b/pkg/migrations/mysql/20201105093056_orders_add_index.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upOrdersAddIndex, downOrdersAddIndex) + +} + +func upOrdersAddIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downOrdersAddIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/20201106114742_klines.go b/pkg/migrations/mysql/20201106114742_klines.go similarity index 71% rename from pkg/migrations/20201106114742_klines.go rename to pkg/migrations/mysql/20201106114742_klines.go index bbb4ca9ac4..43fd04ca44 100644 --- a/pkg/migrations/20201106114742_klines.go +++ b/pkg/migrations/mysql/20201106114742_klines.go @@ -1,4 +1,4 @@ -package migrations +package mysql import ( "context" @@ -7,13 +7,14 @@ import ( ) func init() { - rockhopper.AddMigration(upKlines, downKlines) + AddMigration(upKlines, downKlines) + } func upKlines(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is applied. - _, err = tx.ExecContext(ctx, "CREATE TABLE `klines`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `exchange` VARCHAR(10) NOT NULL,\n `start_time` DATETIME(3) NOT NULL,\n `end_time` DATETIME(3) NOT NULL,\n `interval` VARCHAR(3) NOT NULL,\n `symbol` VARCHAR(7) NOT NULL,\n `open` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `high` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `low` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `close` DECIMAL(16, 8) UNSIGNED NOT NULL DEFAULT 0.0,\n `volume` DECIMAL(16, 8) UNSIGNED NOT NULL DEFAULT 0.0,\n `closed` BOOL NOT NULL DEFAULT TRUE,\n `last_trade_id` INT UNSIGNED NOT NULL DEFAULT 0,\n `num_trades` INT UNSIGNED NOT NULL DEFAULT 0,\n PRIMARY KEY (`gid`)\n);") + _, err = tx.ExecContext(ctx, "CREATE TABLE `klines`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `exchange` VARCHAR(10) NOT NULL,\n `start_time` DATETIME(3) NOT NULL,\n `end_time` DATETIME(3) NOT NULL,\n `interval` VARCHAR(3) NOT NULL,\n `symbol` VARCHAR(20) NOT NULL,\n `open` DECIMAL(20, 8) UNSIGNED NOT NULL,\n `high` DECIMAL(20, 8) UNSIGNED NOT NULL,\n `low` DECIMAL(20, 8) UNSIGNED NOT NULL,\n `close` DECIMAL(20, 8) UNSIGNED NOT NULL DEFAULT 0.0,\n `volume` DECIMAL(20, 8) UNSIGNED NOT NULL DEFAULT 0.0,\n `closed` BOOL NOT NULL DEFAULT TRUE,\n `last_trade_id` INT UNSIGNED NOT NULL DEFAULT 0,\n `num_trades` INT UNSIGNED NOT NULL DEFAULT 0,\n PRIMARY KEY (`gid`)\n);") if err != nil { return err } diff --git a/pkg/migrations/mysql/20201211175751_fix_symbol_length.go b/pkg/migrations/mysql/20201211175751_fix_symbol_length.go new file mode 100644 index 0000000000..56456075f2 --- /dev/null +++ b/pkg/migrations/mysql/20201211175751_fix_symbol_length.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixSymbolLength, downFixSymbolLength) + +} + +func upFixSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downFixSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210118163847_fix_unique_index.go b/pkg/migrations/mysql/20210118163847_fix_unique_index.go new file mode 100644 index 0000000000..f370b21958 --- /dev/null +++ b/pkg/migrations/mysql/20210118163847_fix_unique_index.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixUniqueIndex, downFixUniqueIndex) + +} + +func upFixUniqueIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downFixUniqueIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210119232826_add_margin_columns.go b/pkg/migrations/mysql/20210119232826_add_margin_columns.go new file mode 100644 index 0000000000..f7b8bdc585 --- /dev/null +++ b/pkg/migrations/mysql/20210119232826_add_margin_columns.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddMarginColumns, downAddMarginColumns) + +} + +func upAddMarginColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downAddMarginColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210129182704_trade_price_quantity_index.go b/pkg/migrations/mysql/20210129182704_trade_price_quantity_index.go new file mode 100644 index 0000000000..9b6a5753bd --- /dev/null +++ b/pkg/migrations/mysql/20210129182704_trade_price_quantity_index.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upTradePriceQuantityIndex, downTradePriceQuantityIndex) + +} + +func upTradePriceQuantityIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_price_quantity ON trades (order_id,price,quantity);") + if err != nil { + return err + } + + return err +} + +func downTradePriceQuantityIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
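+	// remove the (order_id, price, quantity) composite index created in upTradePriceQuantityIndex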
+ + _, err = tx.ExecContext(ctx, "DROP INDEX trades_price_quantity ON trades") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210215203116_add_pnl_column.go b/pkg/migrations/mysql/20210215203116_add_pnl_column.go new file mode 100644 index 0000000000..712b5e848c --- /dev/null +++ b/pkg/migrations/mysql/20210215203116_add_pnl_column.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddPnlColumn, downAddPnlColumn) + +} + +func upAddPnlColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downAddPnlColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210223080622_add_rewards_table.go b/pkg/migrations/mysql/20210223080622_add_rewards_table.go new file mode 100644 index 0000000000..edf0f9ab8b --- /dev/null +++ b/pkg/migrations/mysql/20210223080622_add_rewards_table.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddRewardsTable, downAddRewardsTable) + +} + +func upAddRewardsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `rewards`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n -- for exchange\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n -- reward record id\n `uuid` VARCHAR(32) NOT NULL,\n `reward_type` VARCHAR(24) NOT NULL DEFAULT '',\n -- currency symbol, BTC, MAX, USDT ... etc\n `currency` VARCHAR(5) NOT NULL,\n -- the quantity of the rewards\n `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `state` VARCHAR(5) NOT NULL,\n `created_at` DATETIME NOT NULL,\n `spent` BOOLEAN NOT NULL DEFAULT FALSE,\n `note` TEXT NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY `uuid` (`exchange`, `uuid`)\n);") + if err != nil { + return err + } + + return err +} + +func downAddRewardsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `rewards`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210301140656_add_withdraws_table.go b/pkg/migrations/mysql/20210301140656_add_withdraws_table.go new file mode 100644 index 0000000000..3f6c296a4b --- /dev/null +++ b/pkg/migrations/mysql/20210301140656_add_withdraws_table.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddWithdrawsTable, downAddWithdrawsTable) + +} + +func upAddWithdrawsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
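+	// withdrawals are keyed by (exchange, txn_id); the unique key below prevents
+	// inserting the same transaction twice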
+ + _, err = tx.ExecContext(ctx, "CREATE TABLE `withdraws`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n -- asset is the asset name (currency)\n `asset` VARCHAR(10) NOT NULL,\n `address` VARCHAR(128) NOT NULL,\n `network` VARCHAR(32) NOT NULL DEFAULT '',\n `amount` DECIMAL(16, 8) NOT NULL,\n `txn_id` VARCHAR(256) NOT NULL,\n `txn_fee` DECIMAL(16, 8) NOT NULL DEFAULT 0,\n `txn_fee_currency` VARCHAR(32) NOT NULL DEFAULT '',\n `time` DATETIME(3) NOT NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY `txn_id` (`exchange`, `txn_id`)\n);") + if err != nil { + return err + } + + return err +} + +func downAddWithdrawsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `withdraws`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210307201830_add_deposits_table.go b/pkg/migrations/mysql/20210307201830_add_deposits_table.go new file mode 100644 index 0000000000..753658b03a --- /dev/null +++ b/pkg/migrations/mysql/20210307201830_add_deposits_table.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddDepositsTable, downAddDepositsTable) + +} + +func upAddDepositsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `deposits`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `exchange` VARCHAR(24) NOT NULL,\n -- asset is the asset name (currency)\n `asset` VARCHAR(10) NOT NULL,\n `address` VARCHAR(128) NOT NULL DEFAULT '',\n `amount` DECIMAL(16, 8) NOT NULL,\n `txn_id` VARCHAR(256) NOT NULL,\n `time` DATETIME(3) NOT NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY `txn_id` (`exchange`, `txn_id`)\n);") + if err != nil { + return err + } + + return err +} + +func downAddDepositsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `deposits`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210416230730_klines_symbol_length.go b/pkg/migrations/mysql/20210416230730_klines_symbol_length.go new file mode 100644 index 0000000000..4a5672bebe --- /dev/null +++ b/pkg/migrations/mysql/20210416230730_klines_symbol_length.go @@ -0,0 +1,64 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upKlinesSymbolLength, downKlinesSymbolLength) + +} + +func upKlinesSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE `klines`\nMODIFY COLUMN `symbol` VARCHAR(10) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\nMODIFY COLUMN `symbol` VARCHAR(10) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\nMODIFY COLUMN `symbol` VARCHAR(10) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\nMODIFY COLUMN `symbol` VARCHAR(10) NOT NULL;") + if err != nil { + return err + } + + return err +} + +func downKlinesSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `klines`\nMODIFY COLUMN `symbol` VARCHAR(7) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\nMODIFY COLUMN `symbol` VARCHAR(7) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\nMODIFY COLUMN `symbol` VARCHAR(7) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\nMODIFY COLUMN `symbol` VARCHAR(7) NOT NULL;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210421091430_increase_symbol_length.go b/pkg/migrations/mysql/20210421091430_increase_symbol_length.go new file mode 100644 index 0000000000..8ade5fd7e5 --- /dev/null +++ b/pkg/migrations/mysql/20210421091430_increase_symbol_length.go @@ -0,0 +1,64 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upIncreaseSymbolLength, downIncreaseSymbolLength) + +} + +func upIncreaseSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `klines`\nMODIFY COLUMN `symbol` VARCHAR(12) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\nMODIFY COLUMN `symbol` VARCHAR(12) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\nMODIFY COLUMN `symbol` VARCHAR(12) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\nMODIFY COLUMN `symbol` VARCHAR(12) NOT NULL;") + if err != nil { + return err + } + + return err +} + +func downIncreaseSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE `klines`\nMODIFY COLUMN `symbol` VARCHAR(10) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\nMODIFY COLUMN `symbol` VARCHAR(10) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\nMODIFY COLUMN `symbol` VARCHAR(10) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\nMODIFY COLUMN `symbol` VARCHAR(10) NOT NULL;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210421095030_increase_decimal_length.go b/pkg/migrations/mysql/20210421095030_increase_decimal_length.go new file mode 100644 index 0000000000..3524e01e01 --- /dev/null +++ b/pkg/migrations/mysql/20210421095030_increase_decimal_length.go @@ -0,0 +1,64 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upIncreaseDecimalLength, downIncreaseDecimalLength) + +} + +func upIncreaseDecimalLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `klines`\nMODIFY COLUMN `volume` decimal(20,8) unsigned NOT NULL DEFAULT '0.00000000';") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\nMODIFY COLUMN `volume` decimal(20,8) unsigned NOT NULL DEFAULT '0.00000000';") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\nMODIFY COLUMN `volume` decimal(20,8) unsigned NOT NULL DEFAULT '0.00000000';") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\nMODIFY COLUMN `volume` decimal(20,8) unsigned NOT NULL DEFAULT '0.00000000';") + if err != nil { + return err + } + + return err +} + +func downIncreaseDecimalLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `klines`\nMODIFY COLUMN `volume` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000';") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\nMODIFY COLUMN `volume` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000';") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\nMODIFY COLUMN `volume` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000';") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\nMODIFY COLUMN `volume` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000';") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20210531234123_add_kline_taker_buy_columns.go b/pkg/migrations/mysql/20210531234123_add_kline_taker_buy_columns.go new file mode 100644 index 0000000000..3687cf0d0a --- /dev/null +++ b/pkg/migrations/mysql/20210531234123_add_kline_taker_buy_columns.go @@ -0,0 +1,59 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddKlineTakerBuyColumns, downAddKlineTakerBuyColumns) + +} + +func upAddKlineTakerBuyColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\n ADD COLUMN `quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0,\n ADD COLUMN `taker_buy_base_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0,\n ADD COLUMN `taker_buy_quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\n ADD COLUMN `quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0,\n ADD COLUMN `taker_buy_base_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0,\n ADD COLUMN `taker_buy_quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\n ADD COLUMN `quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0,\n ADD COLUMN `taker_buy_base_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0,\n ADD COLUMN `taker_buy_quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `klines`\n ADD COLUMN `quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0,\n ADD COLUMN `taker_buy_base_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0,\n ADD COLUMN `taker_buy_quote_volume` DECIMAL(32, 8) NOT NULL DEFAULT 0.0;") + if err != nil { + return err + } + + return err +} + +func downAddKlineTakerBuyColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\n DROP COLUMN `quote_volume`,\n DROP COLUMN `taker_buy_base_volume`,\n DROP COLUMN `taker_buy_quote_volume`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\n DROP COLUMN `quote_volume`,\n DROP COLUMN `taker_buy_base_volume`,\n DROP COLUMN `taker_buy_quote_volume`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\n DROP COLUMN `quote_volume`,\n DROP COLUMN `taker_buy_base_volume`,\n DROP COLUMN `taker_buy_quote_volume`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20211204014905_update_taker_buy_base_volumn.go b/pkg/migrations/mysql/20211204014905_update_taker_buy_base_volumn.go new file mode 100644 index 0000000000..86131a3f34 --- /dev/null +++ b/pkg/migrations/mysql/20211204014905_update_taker_buy_base_volumn.go @@ -0,0 +1,54 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upUpdateTakerBuyBaseVolumn, downUpdateTakerBuyBaseVolumn) + +} + +func upUpdateTakerBuyBaseVolumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE binance_klines CHANGE taker_buy_base_volume taker_buy_base_volume decimal(32,8) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE max_klines CHANGE taker_buy_base_volume taker_buy_base_volume decimal(32,8) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE okex_klines CHANGE taker_buy_base_volume taker_buy_base_volume decimal(32,8) NOT NULL;") + if err != nil { + return err + } + + return err +} + +func downUpdateTakerBuyBaseVolumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE binance_klines CHANGE taker_buy_base_volume taker_buy_base_volume decimal(16,8) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE max_klines CHANGE taker_buy_base_volume taker_buy_base_volume decimal(16,8) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE okex_klines CHANGE taker_buy_base_volume taker_buy_base_volume decimal(16,8) NOT NULL;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20211205162043_add_is_futures_column.go b/pkg/migrations/mysql/20211205162043_add_is_futures_column.go new file mode 100644 index 0000000000..1ee5ac34b7 --- /dev/null +++ b/pkg/migrations/mysql/20211205162043_add_is_futures_column.go @@ -0,0 +1,44 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddIsFuturesColumn, downAddIsFuturesColumn) + +} + +func upAddIsFuturesColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD COLUMN `is_futures` BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `orders` ADD COLUMN `is_futures` BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return err + } + + return err +} + +func downAddIsFuturesColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` DROP COLUMN `is_futures`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `orders` DROP COLUMN `is_futures`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20211211020303_add_ftx_kline.go b/pkg/migrations/mysql/20211211020303_add_ftx_kline.go new file mode 100644 index 0000000000..f13600a40b --- /dev/null +++ b/pkg/migrations/mysql/20211211020303_add_ftx_kline.go @@ -0,0 +1,39 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddFtxKline, downAddFtxKline) + +} + +func upAddFtxKline(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
+ + _, err = tx.ExecContext(ctx, "create table if not exists ftx_klines\n(\n gid bigint unsigned auto_increment\n primary key,\n exchange varchar(10) not null,\n start_time datetime(3) not null,\n end_time datetime(3) not null,\n `interval` varchar(3) not null,\n symbol varchar(20) not null,\n open decimal(20,8) unsigned not null,\n high decimal(20,8) unsigned not null,\n low decimal(20,8) unsigned not null,\n close decimal(20,8) unsigned default 0.00000000 not null,\n volume decimal(20,8) unsigned default 0.00000000 not null,\n closed tinyint(1) default 1 not null,\n last_trade_id int unsigned default '0' not null,\n num_trades int unsigned default '0' not null,\n quote_volume decimal(32,4) default 0.0000 not null,\n taker_buy_base_volume decimal(32,8) not null,\n taker_buy_quote_volume decimal(32,4) default 0.0000 not null\n );") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "create index klines_end_time_symbol_interval\n on ftx_klines (end_time, symbol, `interval`);") + if err != nil { + return err + } + + return err +} + +func downAddFtxKline(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "drop table ftx_klines;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20211211034819_add_nav_history_details.go b/pkg/migrations/mysql/20211211034819_add_nav_history_details.go new file mode 100644 index 0000000000..f98de36fdf --- /dev/null +++ b/pkg/migrations/mysql/20211211034819_add_nav_history_details.go @@ -0,0 +1,39 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddNavHistoryDetails, downAddNavHistoryDetails) + +} + +func upAddNavHistoryDetails(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE nav_history_details\n(\n gid bigint unsigned auto_increment PRIMARY KEY,\n exchange VARCHAR(30) NOT NULL,\n subaccount VARCHAR(30) NOT NULL,\n time DATETIME(3) NOT NULL,\n currency VARCHAR(12) NOT NULL,\n balance_in_usd DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL,\n balance_in_btc DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL,\n balance DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL,\n available DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL,\n locked DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX idx_nav_history_details\n on nav_history_details (time, currency, exchange);") + if err != nil { + return err + } + + return err +} + +func downAddNavHistoryDetails(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "DROP TABLE nav_history_details;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20211211103657_update_fee_currency_length.go b/pkg/migrations/mysql/20211211103657_update_fee_currency_length.go new file mode 100644 index 0000000000..36c21ae84b --- /dev/null +++ b/pkg/migrations/mysql/20211211103657_update_fee_currency_length.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upUpdateFeeCurrencyLength, downUpdateFeeCurrencyLength) + +} + +func upUpdateFeeCurrencyLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downUpdateFeeCurrencyLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20211226022411_add_kucoin_klines.go b/pkg/migrations/mysql/20211226022411_add_kucoin_klines.go new file mode 100644 index 0000000000..dfb281d3bd --- /dev/null +++ b/pkg/migrations/mysql/20211226022411_add_kucoin_klines.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddKucoinKlines, downAddKucoinKlines) + +} + +func upAddKucoinKlines(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `kucoin_klines` LIKE `binance_klines`;") + if err != nil { + return err + } + + return err +} + +func downAddKucoinKlines(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE `kucoin_klines`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220304153317_add_profit_table.go b/pkg/migrations/mysql/20220304153317_add_profit_table.go new file mode 100644 index 0000000000..dd5d4e082c --- /dev/null +++ b/pkg/migrations/mysql/20220304153317_add_profit_table.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddProfitTable, downAddProfitTable) + +} + +func upAddProfitTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
+ + _, err = tx.ExecContext(ctx, "CREATE TABLE `profits`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `strategy` VARCHAR(32) NOT NULL,\n `strategy_instance_id` VARCHAR(64) NOT NULL,\n `symbol` VARCHAR(8) NOT NULL,\n -- average_cost is the position average cost\n `average_cost` DECIMAL(16, 8) UNSIGNED NOT NULL,\n -- profit is the pnl (profit and loss)\n `profit` DECIMAL(16, 8) NOT NULL,\n -- net_profit is the pnl (profit and loss)\n `net_profit` DECIMAL(16, 8) NOT NULL,\n -- profit_margin is the pnl (profit and loss)\n `profit_margin` DECIMAL(16, 8) NOT NULL,\n -- net_profit_margin is the pnl (profit and loss)\n `net_profit_margin` DECIMAL(16, 8) NOT NULL,\n `quote_currency` VARCHAR(10) NOT NULL,\n `base_currency` VARCHAR(10) NOT NULL,\n -- -------------------------------------------------------\n -- embedded trade data --\n -- -------------------------------------------------------\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `is_futures` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_margin` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE,\n `trade_id` BIGINT UNSIGNED NOT NULL,\n -- side is the side of the trade that makes profit\n `side` VARCHAR(4) NOT NULL DEFAULT '',\n `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_maker` BOOLEAN NOT NULL DEFAULT FALSE,\n -- price is the price of the trade that makes profit\n `price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n -- quantity is the quantity of the trade that makes profit\n `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n -- quote_quantity is the quote quantity of the trade that makes profit\n `quote_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `traded_at` DATETIME(3) NOT NULL,\n -- fee\n `fee_in_usd` DECIMAL(16, 8),\n `fee` DECIMAL(16, 8) NOT NULL,\n `fee_currency` VARCHAR(10) NOT NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY `trade_id` (`trade_id`)\n);") + if err != nil { + return err + } + + return err +} + +func downAddProfitTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `profits`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220307132917_add_positions.go b/pkg/migrations/mysql/20220307132917_add_positions.go new file mode 100644 index 0000000000..6670419fd4 --- /dev/null +++ b/pkg/migrations/mysql/20220307132917_add_positions.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddPositions, downAddPositions) + +} + +func upAddPositions(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
+ + _, err = tx.ExecContext(ctx, "CREATE TABLE `positions`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `strategy` VARCHAR(32) NOT NULL,\n `strategy_instance_id` VARCHAR(64) NOT NULL,\n `symbol` VARCHAR(20) NOT NULL,\n `quote_currency` VARCHAR(10) NOT NULL,\n `base_currency` VARCHAR(10) NOT NULL,\n -- average_cost is the position average cost\n `average_cost` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `base` DECIMAL(16, 8) NOT NULL,\n `quote` DECIMAL(16, 8) NOT NULL,\n `profit` DECIMAL(16, 8) NULL,\n -- trade related columns\n `trade_id` BIGINT UNSIGNED NOT NULL, -- the trade id in the exchange\n `side` VARCHAR(4) NOT NULL, -- side of the trade\n `exchange` VARCHAR(12) NOT NULL, -- exchange of the trade\n `traded_at` DATETIME(3) NOT NULL, -- millisecond timestamp\n PRIMARY KEY (`gid`),\n UNIQUE KEY `trade_id` (`trade_id`, `side`, `exchange`)\n);") + if err != nil { + return err + } + + return err +} + +func downAddPositions(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `positions`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220317125555_fix_trade_indexes.go b/pkg/migrations/mysql/20220317125555_fix_trade_indexes.go new file mode 100644 index 0000000000..3e0e519a60 --- /dev/null +++ b/pkg/migrations/mysql/20220317125555_fix_trade_indexes.go @@ -0,0 +1,84 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixTradeIndexes, downFixTradeIndexes) + +} + +func upFixTradeIndexes(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol ON trades;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol_fee_currency ON trades;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX trades_traded_at_symbol ON trades;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_traded_at ON trades (traded_at, symbol, exchange, id, fee_currency, fee);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_id_traded_at ON trades (id, traded_at);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_order_id_traded_at ON trades (order_id, traded_at);") + if err != nil { + return err + } + + return err +} + +func downFixTradeIndexes(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "DROP INDEX trades_traded_at ON trades;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX trades_id_traded_at ON trades;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX trades_order_id_traded_at ON trades;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_symbol ON trades (exchange, symbol);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_symbol_fee_currency ON trades (exchange, symbol, fee_currency, traded_at);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_traded_at_symbol ON trades (exchange, traded_at, symbol);") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220419121046_fix_fee_column.go b/pkg/migrations/mysql/20220419121046_fix_fee_column.go new file mode 100644 index 0000000000..aa544c92e1 --- /dev/null +++ b/pkg/migrations/mysql/20220419121046_fix_fee_column.go @@ -0,0 +1,44 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixFeeColumn, downFixFeeColumn) + +} + +func upFixFeeColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE trades\n CHANGE fee fee DECIMAL(16, 8) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE profits\n CHANGE fee fee DECIMAL(16, 8) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE profits\n CHANGE fee_in_usd fee_in_usd DECIMAL(16, 8);") + if err != nil { + return err + } + + return err +} + +func downFixFeeColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220503144849_add_margin_info_to_nav.go b/pkg/migrations/mysql/20220503144849_add_margin_info_to_nav.go new file mode 100644 index 0000000000..98b25028aa --- /dev/null +++ b/pkg/migrations/mysql/20220503144849_add_margin_info_to_nav.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddMarginInfoToNav, downAddMarginInfoToNav) + +} + +func upAddMarginInfoToNav(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details`\n ADD COLUMN `session` VARCHAR(30) NOT NULL,\n ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE,\n ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE,\n ADD COLUMN `isolated_symbol` VARCHAR(30) NOT NULL DEFAULT '',\n ADD COLUMN `net_asset` DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL,\n ADD COLUMN `borrowed` DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL,\n ADD COLUMN `price_in_usd` DECIMAL(32, 8) UNSIGNED DEFAULT 0.00000000 NOT NULL\n;") + if err != nil { + return err + } + + return err +} + +func downAddMarginInfoToNav(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details`\n DROP COLUMN `session`,\n DROP COLUMN `net_asset`,\n DROP COLUMN `borrowed`,\n DROP COLUMN `price_in_usd`,\n DROP COLUMN `is_margin`,\n DROP COLUMN `is_isolated`,\n DROP COLUMN `isolated_symbol`\n;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220504184155_fix_net_asset_column.go b/pkg/migrations/mysql/20220504184155_fix_net_asset_column.go new file mode 100644 index 0000000000..c986d66fe9 --- /dev/null +++ b/pkg/migrations/mysql/20220504184155_fix_net_asset_column.go @@ -0,0 +1,39 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixNetAssetColumn, downFixNetAssetColumn) + +} + +func upFixNetAssetColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details`\n MODIFY COLUMN `net_asset` DECIMAL(32, 8) DEFAULT 0.00000000 NOT NULL,\n CHANGE COLUMN `balance_in_usd` `net_asset_in_usd` DECIMAL(32, 2) DEFAULT 0.00000000 NOT NULL,\n CHANGE COLUMN `balance_in_btc` `net_asset_in_btc` DECIMAL(32, 20) DEFAULT 0.00000000 NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details`\n ADD COLUMN `interest` DECIMAL(32, 20) UNSIGNED DEFAULT 0.00000000 NOT NULL;") + if err != nil { + return err + } + + return err +} + +func downFixNetAssetColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details`\n DROP COLUMN `interest`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220512170322_fix_profit_symbol_length.go b/pkg/migrations/mysql/20220512170322_fix_profit_symbol_length.go new file mode 100644 index 0000000000..1196c4d06e --- /dev/null +++ b/pkg/migrations/mysql/20220512170322_fix_profit_symbol_length.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixProfitSymbolLength, downFixProfitSymbolLength) + +} + +func upFixProfitSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE profits\n CHANGE symbol symbol VARCHAR(20) NOT NULL;") + if err != nil { + return err + } + + return err +} + +func downFixProfitSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220520140707_kline_unique_idx.go b/pkg/migrations/mysql/20220520140707_kline_unique_idx.go new file mode 100644 index 0000000000..84dfb50303 --- /dev/null +++ b/pkg/migrations/mysql/20220520140707_kline_unique_idx.go @@ -0,0 +1,74 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upKlineUniqueIdx, downKlineUniqueIdx) + +} + +func upKlineUniqueIdx(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
+ + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX idx_kline_binance_unique\n ON binance_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX idx_kline_max_unique\n ON max_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `idx_kline_ftx_unique`\n ON ftx_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `idx_kline_kucoin_unique`\n ON kucoin_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `idx_kline_okex_unique`\n ON okex_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + return err +} + +func downKlineUniqueIdx(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_ftx_unique` ON `ftx_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_max_unique` ON `max_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_binance_unique` ON `binance_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_kucoin_unique` ON `kucoin_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_okex_unique` ON `okex_klines`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220531012226_margin_loans.go b/pkg/migrations/mysql/20220531012226_margin_loans.go new file mode 100644 index 0000000000..1857a18ee4 --- /dev/null +++ b/pkg/migrations/mysql/20220531012226_margin_loans.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upMarginLoans, downMarginLoans) + +} + +func upMarginLoans(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `margin_loans`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `transaction_id` BIGINT UNSIGNED NOT NULL,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `asset` VARCHAR(24) NOT NULL DEFAULT '',\n `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '',\n -- quantity is the quantity of the trade that makes profit\n `principle` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `time` DATETIME(3) NOT NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY (`transaction_id`)\n);") + if err != nil { + return err + } + + return err +} + +func downMarginLoans(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `margin_loans`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220531013327_margin_repays.go b/pkg/migrations/mysql/20220531013327_margin_repays.go new file mode 100644 index 0000000000..66582d9770 --- /dev/null +++ b/pkg/migrations/mysql/20220531013327_margin_repays.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upMarginRepays, downMarginRepays) + +} + +func upMarginRepays(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `margin_repays`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `transaction_id` BIGINT UNSIGNED NOT NULL,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `asset` VARCHAR(24) NOT NULL DEFAULT '',\n `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '',\n -- quantity is the quantity of the trade that makes profit\n `principle` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `time` DATETIME(3) NOT NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY (`transaction_id`)\n);") + if err != nil { + return err + } + + return err +} + +func downMarginRepays(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `margin_repays`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220531013542_margin_interests.go b/pkg/migrations/mysql/20220531013542_margin_interests.go new file mode 100644 index 0000000000..b6f3be1519 --- /dev/null +++ b/pkg/migrations/mysql/20220531013542_margin_interests.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upMarginInterests, downMarginInterests) + +} + +func upMarginInterests(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `margin_interests`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `asset` VARCHAR(24) NOT NULL DEFAULT '',\n `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '',\n `principle` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `interest` DECIMAL(20, 16) UNSIGNED NOT NULL,\n `interest_rate` DECIMAL(20, 16) UNSIGNED NOT NULL,\n `time` DATETIME(3) NOT NULL,\n PRIMARY KEY (`gid`)\n);") + if err != nil { + return err + } + + return err +} + +func downMarginInterests(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `margin_interests`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/20220531015005_margin_liquidations.go b/pkg/migrations/mysql/20220531015005_margin_liquidations.go new file mode 100644 index 0000000000..194c0a67fd --- /dev/null +++ b/pkg/migrations/mysql/20220531015005_margin_liquidations.go @@ -0,0 +1,34 @@ +package mysql + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upMarginLiquidations, downMarginLiquidations) + +} + +func upMarginLiquidations(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
+ + _, err = tx.ExecContext(ctx, "CREATE TABLE `margin_liquidations`\n(\n `gid` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `symbol` VARCHAR(24) NOT NULL DEFAULT '',\n `order_id` BIGINT UNSIGNED NOT NULL,\n `is_isolated` BOOL NOT NULL DEFAULT false,\n `average_price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `price` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `executed_quantity` DECIMAL(16, 8) UNSIGNED NOT NULL,\n `side` VARCHAR(5) NOT NULL DEFAULT '',\n `time_in_force` VARCHAR(5) NOT NULL DEFAULT '',\n `time` DATETIME(3) NOT NULL,\n PRIMARY KEY (`gid`),\n UNIQUE KEY (`order_id`, `exchange`)\n);") + if err != nil { + return err + } + + return err +} + +func downMarginLiquidations(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `margin_liquidations`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/mysql/migration_api.go b/pkg/migrations/mysql/migration_api.go new file mode 100644 index 0000000000..b68c5f81d9 --- /dev/null +++ b/pkg/migrations/mysql/migration_api.go @@ -0,0 +1,87 @@ +package mysql + +import ( + "fmt" + "log" + "runtime" + "strings" + + "github.com/c9s/rockhopper" +) + +var registeredGoMigrations map[int64]*rockhopper.Migration + +// MergeMigrationsMap merges the given migration map into the registered go migration map, skipping duplicated keys +func MergeMigrationsMap(ms map[int64]*rockhopper.Migration) { + for k, m := range ms { + if _, ok := registeredGoMigrations[k]; !ok { + registeredGoMigrations[k] = m + } else { + log.Printf("the migration key %d is duplicated: %+v", k, m) + } + } +} + +// GetMigrationsMap returns the registered go migration map +func GetMigrationsMap() map[int64]*rockhopper.Migration { + return registeredGoMigrations +} + +// SortedMigrations builds up the migration objects, sorts them by timestamp, and returns them as a slice +func SortedMigrations() rockhopper.MigrationSlice { + return Migrations() +} + +// Migrations builds up the migration objects, sorts them by timestamp, and returns them as a slice +func Migrations() rockhopper.MigrationSlice { + var migrations = rockhopper.MigrationSlice{} + for _, migration := range registeredGoMigrations { + migrations = append(migrations, migration) + } + + return migrations.SortAndConnect() +} + +// AddMigration adds a migration with its runtime caller information +func AddMigration(up, down rockhopper.TransactionHandler) { + pc, filename, _, _ := runtime.Caller(1) + + funcName := runtime.FuncForPC(pc).Name() + packageName := _parseFuncPackageName(funcName) + AddNamedMigration(packageName, filename, up, down) +} + +// _parseFuncPackageName parses the package name from a given runtime caller function name +func _parseFuncPackageName(funcName string) string { + lastSlash := strings.LastIndexByte(funcName, '/') + if lastSlash < 0 { + lastSlash = 0 + } + + lastDot := strings.LastIndexByte(funcName[lastSlash:], '.') + lastSlash + packageName := funcName[:lastDot] + return packageName +} + +// AddNamedMigration adds a named migration to the registered go migration map +func AddNamedMigration(packageName, filename string, up, down rockhopper.TransactionHandler) { + if registeredGoMigrations == nil { + registeredGoMigrations = make(map[int64]*rockhopper.Migration) + } + + v, _ := rockhopper.FileNumericComponent(filename) + + migration := &rockhopper.Migration{ + Package: packageName, + Registered: true, + + Version: v, + UpFn: up, + DownFn: down, + Source: filename, + UseTx: true, + } + + if existing, ok := registeredGoMigrations[v]; ok { + panic(fmt.Sprintf("failed 
to add migration %q: version conflicts with %q", filename, existing.Source)) + } + registeredGoMigrations[v] = migration +} diff --git a/pkg/migrations/mysql/migration_api_test.go b/pkg/migrations/mysql/migration_api_test.go new file mode 100644 index 0000000000..9864095ce0 --- /dev/null +++ b/pkg/migrations/mysql/migration_api_test.go @@ -0,0 +1,20 @@ +package mysql + +import ( + "testing" + + "github.com/c9s/rockhopper" + "github.com/stretchr/testify/assert" +) + +func TestGetMigrationsMap(t *testing.T) { + mm := GetMigrationsMap() + assert.NotEmpty(t, mm) +} + +func TestMergeMigrationsMap(t *testing.T) { + MergeMigrationsMap(map[int64]*rockhopper.Migration{ + 2: {}, + 3: {}, + }) +} diff --git a/pkg/migrations/sqlite3/20200721225616_trades.go b/pkg/migrations/sqlite3/20200721225616_trades.go new file mode 100644 index 0000000000..dbc677683e --- /dev/null +++ b/pkg/migrations/sqlite3/20200721225616_trades.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upTrades, downTrades) + +} + +func upTrades(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `trades`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `id` INTEGER,\n `exchange` TEXT NOT NULL DEFAULT '',\n `symbol` TEXT NOT NULL,\n `price` DECIMAL(16, 8) NOT NULL,\n `quantity` DECIMAL(16, 8) NOT NULL,\n `quote_quantity` DECIMAL(16, 8) NOT NULL,\n `fee` DECIMAL(16, 8) NOT NULL,\n `fee_currency` VARCHAR(4) NOT NULL,\n `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_maker` BOOLEAN NOT NULL DEFAULT FALSE,\n `side` VARCHAR(4) NOT NULL DEFAULT '',\n `traded_at` DATETIME(3) NOT NULL\n);") + if err != nil { + return err + } + + return err +} + +func downTrades(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `trades`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/20200819054742_trade_index.go b/pkg/migrations/sqlite3/20200819054742_trade_index.go similarity index 85% rename from pkg/migrations/20200819054742_trade_index.go rename to pkg/migrations/sqlite3/20200819054742_trade_index.go index 59c7b89ef3..50d3a95142 100644 --- a/pkg/migrations/20200819054742_trade_index.go +++ b/pkg/migrations/sqlite3/20200819054742_trade_index.go @@ -1,4 +1,4 @@ -package migrations +package sqlite3 import ( "context" @@ -7,7 +7,8 @@ import ( ) func init() { - rockhopper.AddMigration(upTradeIndex, downTradeIndex) + AddMigration(upTradeIndex, downTradeIndex) + } func upTradeIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { @@ -34,17 +35,17 @@ func upTradeIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { func downTradeIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is rolled back. 
- _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol;") if err != nil { return err } - _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol_fee_currency ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol_fee_currency;") if err != nil { return err } - _, err = tx.ExecContext(ctx, "DROP INDEX trades_traded_at_symbol ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX trades_traded_at_symbol;") if err != nil { return err } diff --git a/pkg/migrations/sqlite3/20201102222546_orders.go b/pkg/migrations/sqlite3/20201102222546_orders.go new file mode 100644 index 0000000000..1930783d5a --- /dev/null +++ b/pkg/migrations/sqlite3/20201102222546_orders.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upOrders, downOrders) + +} + +func upOrders(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `orders`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR NOT NULL DEFAULT '',\n -- order_id is the order id returned from the exchange\n `order_id` INTEGER NOT NULL,\n `client_order_id` VARCHAR NOT NULL DEFAULT '',\n `order_type` VARCHAR NOT NULL,\n `symbol` VARCHAR NOT NULL,\n `status` VARCHAR NOT NULL,\n `time_in_force` VARCHAR NOT NULL,\n `price` DECIMAL(16, 8) NOT NULL,\n `stop_price` DECIMAL(16, 8) NOT NULL,\n `quantity` DECIMAL(16, 8) NOT NULL,\n `executed_quantity` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `side` VARCHAR NOT NULL DEFAULT '',\n `is_working` BOOLEAN NOT NULL DEFAULT FALSE,\n `created_at` DATETIME(3) NOT NULL,\n `updated_at` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP\n);") + if err != nil { + return err + } + + return err +} + +func downOrders(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `orders`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20201103173342_trades_add_order_id.go b/pkg/migrations/sqlite3/20201103173342_trades_add_order_id.go new file mode 100644 index 0000000000..05608ffb55 --- /dev/null +++ b/pkg/migrations/sqlite3/20201103173342_trades_add_order_id.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upTradesAddOrderId, downTradesAddOrderId) + +} + +func upTradesAddOrderId(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD COLUMN `order_id` INTEGER;") + if err != nil { + return err + } + + return err +} + +func downTradesAddOrderId(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` RENAME COLUMN `order_id` TO `order_id_deleted`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/20201105092857_trades_index_fix.go b/pkg/migrations/sqlite3/20201105092857_trades_index_fix.go similarity index 72% rename from pkg/migrations/20201105092857_trades_index_fix.go rename to pkg/migrations/sqlite3/20201105092857_trades_index_fix.go index 72b90f3212..969449027f 100644 --- a/pkg/migrations/20201105092857_trades_index_fix.go +++ b/pkg/migrations/sqlite3/20201105092857_trades_index_fix.go @@ -1,4 +1,4 @@ -package migrations +package sqlite3 import ( "context" @@ -7,23 +7,24 @@ import ( ) func init() { - rockhopper.AddMigration(upTradesIndexFix, downTradesIndexFix) + AddMigration(upTradesIndexFix, downTradesIndexFix) + } func upTradesIndexFix(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is applied. - _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_symbol;") if err != nil { return err } - _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol_fee_currency ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_symbol_fee_currency;") if err != nil { return err } - _, err = tx.ExecContext(ctx, "DROP INDEX trades_traded_at_symbol ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_traded_at_symbol;") if err != nil { return err } @@ -49,17 +50,17 @@ func upTradesIndexFix(ctx context.Context, tx rockhopper.SQLExecutor) (err error func downTradesIndexFix(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is rolled back. - _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_symbol;") if err != nil { return err } - _, err = tx.ExecContext(ctx, "DROP INDEX trades_symbol_fee_currency ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_symbol_fee_currency;") if err != nil { return err } - _, err = tx.ExecContext(ctx, "DROP INDEX trades_traded_at_symbol ON trades;") + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_traded_at_symbol;") if err != nil { return err } diff --git a/pkg/migrations/20201105093056_orders_add_index.go b/pkg/migrations/sqlite3/20201105093056_orders_add_index.go similarity index 76% rename from pkg/migrations/20201105093056_orders_add_index.go rename to pkg/migrations/sqlite3/20201105093056_orders_add_index.go index 9d12511698..fcb730bb5b 100644 --- a/pkg/migrations/20201105093056_orders_add_index.go +++ b/pkg/migrations/sqlite3/20201105093056_orders_add_index.go @@ -1,4 +1,4 @@ -package migrations +package sqlite3 import ( "context" @@ -7,7 +7,8 @@ import ( ) func init() { - rockhopper.AddMigration(upOrdersAddIndex, downOrdersAddIndex) + AddMigration(upOrdersAddIndex, downOrdersAddIndex) + } func upOrdersAddIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { @@ -29,12 +30,12 @@ func upOrdersAddIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error func downOrdersAddIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { // This code is executed when the migration is rolled back. 
- _, err = tx.ExecContext(ctx, "DROP INDEX orders_symbol ON orders;") + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS orders_symbol;") if err != nil { return err } - _, err = tx.ExecContext(ctx, "DROP INDEX orders_order_id ON orders;") + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS orders_order_id;") if err != nil { return err } diff --git a/pkg/migrations/sqlite3/20201106114742_klines.go b/pkg/migrations/sqlite3/20201106114742_klines.go new file mode 100644 index 0000000000..5678f77d8b --- /dev/null +++ b/pkg/migrations/sqlite3/20201106114742_klines.go @@ -0,0 +1,74 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upKlines, downKlines) + +} + +func upKlines(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `klines`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(10) NOT NULL,\n `start_time` DATETIME(3) NOT NULL,\n `end_time` DATETIME(3) NOT NULL,\n `interval` VARCHAR(3) NOT NULL,\n `symbol` VARCHAR(7) NOT NULL,\n `open` DECIMAL(16, 8) NOT NULL,\n `high` DECIMAL(16, 8) NOT NULL,\n `low` DECIMAL(16, 8) NOT NULL,\n `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `closed` BOOLEAN NOT NULL DEFAULT TRUE,\n `last_trade_id` INT NOT NULL DEFAULT 0,\n `num_trades` INT NOT NULL DEFAULT 0\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE TABLE `okex_klines`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(10) NOT NULL,\n `start_time` DATETIME(3) NOT NULL,\n `end_time` DATETIME(3) NOT NULL,\n `interval` VARCHAR(3) NOT NULL,\n `symbol` VARCHAR(7) NOT NULL,\n `open` DECIMAL(16, 8) NOT NULL,\n `high` DECIMAL(16, 8) NOT NULL,\n `low` DECIMAL(16, 8) NOT NULL,\n `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `closed` BOOLEAN NOT NULL DEFAULT TRUE,\n `last_trade_id` INT NOT NULL DEFAULT 0,\n `num_trades` INT NOT NULL DEFAULT 0\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE TABLE `binance_klines`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(10) NOT NULL,\n `start_time` DATETIME(3) NOT NULL,\n `end_time` DATETIME(3) NOT NULL,\n `interval` VARCHAR(3) NOT NULL,\n `symbol` VARCHAR(7) NOT NULL,\n `open` DECIMAL(16, 8) NOT NULL,\n `high` DECIMAL(16, 8) NOT NULL,\n `low` DECIMAL(16, 8) NOT NULL,\n `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `closed` BOOLEAN NOT NULL DEFAULT TRUE,\n `last_trade_id` INT NOT NULL DEFAULT 0,\n `num_trades` INT NOT NULL DEFAULT 0\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE TABLE `max_klines`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(10) NOT NULL,\n `start_time` DATETIME(3) NOT NULL,\n `end_time` DATETIME(3) NOT NULL,\n `interval` VARCHAR(3) NOT NULL,\n `symbol` VARCHAR(7) NOT NULL,\n `open` DECIMAL(16, 8) NOT NULL,\n `high` DECIMAL(16, 8) NOT NULL,\n `low` DECIMAL(16, 8) NOT NULL,\n `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `closed` BOOLEAN NOT NULL DEFAULT TRUE,\n `last_trade_id` INT NOT NULL DEFAULT 0,\n `num_trades` INT NOT NULL DEFAULT 0\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX `klines_end_time_symbol_interval` ON `klines` (`end_time`, `symbol`, `interval`);\nCREATE 
INDEX `binance_klines_end_time_symbol_interval` ON `binance_klines` (`end_time`, `symbol`, `interval`);\nCREATE INDEX `okex_klines_end_time_symbol_interval` ON `okex_klines` (`end_time`, `symbol`, `interval`);\nCREATE INDEX `max_klines_end_time_symbol_interval` ON `max_klines` (`end_time`, `symbol`, `interval`);") + if err != nil { + return err + } + + return err +} + +func downKlines(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS `klines_end_time_symbol_interval`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `binance_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `okex_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `max_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `klines`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20201211175751_fix_symbol_length.go b/pkg/migrations/sqlite3/20201211175751_fix_symbol_length.go new file mode 100644 index 0000000000..3b029b2cad --- /dev/null +++ b/pkg/migrations/sqlite3/20201211175751_fix_symbol_length.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixSymbolLength, downFixSymbolLength) + +} + +func upFixSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downFixSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20210118163847_fix_unique_index.go b/pkg/migrations/sqlite3/20210118163847_fix_unique_index.go new file mode 100644 index 0000000000..649e9cd7fa --- /dev/null +++ b/pkg/migrations/sqlite3/20210118163847_fix_unique_index.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixUniqueIndex, downFixUniqueIndex) + +} + +func upFixUniqueIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `trade_unique_id` ON `trades` (`exchange`,`symbol`, `side`, `id`);") + if err != nil { + return err + } + + return err +} + +func downFixUniqueIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS `trade_unique_id`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20210119232826_add_margin_columns.go b/pkg/migrations/sqlite3/20210119232826_add_margin_columns.go new file mode 100644 index 0000000000..a04974d78e --- /dev/null +++ b/pkg/migrations/sqlite3/20210119232826_add_margin_columns.go @@ -0,0 +1,64 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddMarginColumns, downAddMarginColumns) + +} + +func upAddMarginColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `orders` ADD COLUMN `is_margin` BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `orders` ADD COLUMN `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return err + } + + return err +} + +func downAddMarginColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` RENAME COLUMN `is_margin` TO `is_margin_deleted`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` RENAME COLUMN `is_isolated` TO `is_isolated_deleted`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `orders` RENAME COLUMN `is_margin` TO `is_margin_deleted`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `orders` RENAME COLUMN `is_isolated` TO `is_isolated_deleted`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20210129182704_trade_price_quantity_index.go b/pkg/migrations/sqlite3/20210129182704_trade_price_quantity_index.go new file mode 100644 index 0000000000..33c06c1554 --- /dev/null +++ b/pkg/migrations/sqlite3/20210129182704_trade_price_quantity_index.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upTradePriceQuantityIndex, downTradePriceQuantityIndex) + +} + +func upTradePriceQuantityIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_price_quantity ON trades (order_id,price,quantity);") + if err != nil { + return err + } + + return err +} + +func downTradePriceQuantityIndex(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
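+	// Drop the trades_price_quantity index that the up migration created on (order_id, price, quantity).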
+ + _, err = tx.ExecContext(ctx, "DROP INDEX trades_price_quantity;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20210215203111_add_pnl_column.go b/pkg/migrations/sqlite3/20210215203111_add_pnl_column.go new file mode 100644 index 0000000000..a752d057ec --- /dev/null +++ b/pkg/migrations/sqlite3/20210215203111_add_pnl_column.go @@ -0,0 +1,44 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddPnlColumn, downAddPnlColumn) + +} + +func upAddPnlColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD COLUMN `pnl` DECIMAL NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD COLUMN `strategy` TEXT;") + if err != nil { + return err + } + + return err +} + +func downAddPnlColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` RENAME COLUMN `pnl` TO `pnl_deleted`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` RENAME COLUMN `strategy` TO `strategy_deleted`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20210223080622_add_rewards_table.go b/pkg/migrations/sqlite3/20210223080622_add_rewards_table.go new file mode 100644 index 0000000000..5118f443f5 --- /dev/null +++ b/pkg/migrations/sqlite3/20210223080622_add_rewards_table.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddRewardsTable, downAddRewardsTable) + +} + +func upAddRewardsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `rewards`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n -- for exchange\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n -- reward record id\n `uuid` VARCHAR(32) NOT NULL,\n `reward_type` VARCHAR(24) NOT NULL DEFAULT '',\n -- currency symbol, BTC, MAX, USDT ... etc\n `currency` VARCHAR(5) NOT NULL,\n -- the quantity of the rewards\n `quantity` DECIMAL(16, 8) NOT NULL,\n `state` VARCHAR(5) NOT NULL,\n `created_at` DATETIME NOT NULL,\n `spent` BOOLEAN NOT NULL DEFAULT FALSE,\n `note` TEXT NULL\n);") + if err != nil { + return err + } + + return err +} + +func downAddRewardsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `rewards`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20210301140656_add_withdraws_table.go b/pkg/migrations/sqlite3/20210301140656_add_withdraws_table.go new file mode 100644 index 0000000000..d6e4d0a5f0 --- /dev/null +++ b/pkg/migrations/sqlite3/20210301140656_add_withdraws_table.go @@ -0,0 +1,44 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddWithdrawsTable, downAddWithdrawsTable) + +} + +func upAddWithdrawsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
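+	// Create the withdraws table together with a unique (exchange, txn_id) index to avoid duplicate records.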
+ + _, err = tx.ExecContext(ctx, "CREATE TABLE `withdraws`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n -- asset is the asset name (currency)\n `asset` VARCHAR(10) NOT NULL,\n `address` VARCHAR(128) NOT NULL,\n `network` VARCHAR(32) NOT NULL DEFAULT '',\n `amount` DECIMAL(16, 8) NOT NULL,\n `txn_id` VARCHAR(256) NOT NULL,\n `txn_fee` DECIMAL(16, 8) NOT NULL DEFAULT 0,\n `txn_fee_currency` VARCHAR(32) NOT NULL DEFAULT '',\n `time` DATETIME(3) NOT NULL\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `withdraws_txn_id` ON `withdraws` (`exchange`, `txn_id`);") + if err != nil { + return err + } + + return err +} + +func downAddWithdrawsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS `withdraws_txn_id`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `withdraws`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20210307201830_add_deposits_table.go b/pkg/migrations/sqlite3/20210307201830_add_deposits_table.go new file mode 100644 index 0000000000..0416c54395 --- /dev/null +++ b/pkg/migrations/sqlite3/20210307201830_add_deposits_table.go @@ -0,0 +1,44 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddDepositsTable, downAddDepositsTable) + +} + +func upAddDepositsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `deposits`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(24) NOT NULL,\n -- asset is the asset name (currency)\n `asset` VARCHAR(10) NOT NULL,\n `address` VARCHAR(128) NOT NULL DEFAULT '',\n `amount` DECIMAL(16, 8) NOT NULL,\n `txn_id` VARCHAR(256) NOT NULL,\n `time` DATETIME(3) NOT NULL\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `deposits_txn_id` ON `deposits` (`exchange`, `txn_id`);") + if err != nil { + return err + } + + return err +} + +func downAddDepositsTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS `deposits_txn_id`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `deposits`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20210531234123_add_kline_taker_buy_columns.go b/pkg/migrations/sqlite3/20210531234123_add_kline_taker_buy_columns.go new file mode 100644 index 0000000000..a5f1c8c2c9 --- /dev/null +++ b/pkg/migrations/sqlite3/20210531234123_add_kline_taker_buy_columns.go @@ -0,0 +1,44 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddKlineTakerBuyColumns, downAddKlineTakerBuyColumns) + +} + +func upAddKlineTakerBuyColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
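+	// Add the quote_volume, taker_buy_base_volume and taker_buy_quote_volume columns to each kline table.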
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE `binance_klines`\n ADD COLUMN `quote_volume` DECIMAL NOT NULL DEFAULT 0.0;\nALTER TABLE `binance_klines`\n ADD COLUMN `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0;\nALTER TABLE `binance_klines`\n ADD COLUMN `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `max_klines`\n ADD COLUMN `quote_volume` DECIMAL NOT NULL DEFAULT 0.0;\nALTER TABLE `max_klines`\n ADD COLUMN `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0;\nALTER TABLE `max_klines`\n ADD COLUMN `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `okex_klines`\n ADD COLUMN `quote_volume` DECIMAL NOT NULL DEFAULT 0.0;\nALTER TABLE `okex_klines`\n ADD COLUMN `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0;\nALTER TABLE `okex_klines`\n ADD COLUMN `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `klines`\n ADD COLUMN `quote_volume` DECIMAL NOT NULL DEFAULT 0.0;\nALTER TABLE `klines`\n ADD COLUMN `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0;\nALTER TABLE `klines`\n ADD COLUMN `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0;") + if err != nil { + return err + } + + return err +} + +func downAddKlineTakerBuyColumns(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + return err +} diff --git a/pkg/migrations/sqlite3/20211204014905_update_taker_buy_base_volumn.go b/pkg/migrations/sqlite3/20211204014905_update_taker_buy_base_volumn.go new file mode 100644 index 0000000000..2155ea7b48 --- /dev/null +++ b/pkg/migrations/sqlite3/20211204014905_update_taker_buy_base_volumn.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upUpdateTakerBuyBaseVolumn, downUpdateTakerBuyBaseVolumn) + +} + +func upUpdateTakerBuyBaseVolumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "") + if err != nil { + return err + } + + return err +} + +func downUpdateTakerBuyBaseVolumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20211205162302_add_is_futures_column.go b/pkg/migrations/sqlite3/20211205162302_add_is_futures_column.go new file mode 100644 index 0000000000..7970df0980 --- /dev/null +++ b/pkg/migrations/sqlite3/20211205162302_add_is_futures_column.go @@ -0,0 +1,44 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddIsFuturesColumn, downAddIsFuturesColumn) + +} + +func upAddIsFuturesColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
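+	// Flag trades and orders that were executed on the futures market.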
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` ADD COLUMN `is_futures` BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `orders` ADD COLUMN `is_futures` BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return err + } + + return err +} + +func downAddIsFuturesColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `trades` RENAME COLUMN `is_futures` TO `is_futures_deleted`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `orders` RENAME COLUMN `is_futures` TO `is_futures_deleted`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20211211020303_add_ftx_kline.go b/pkg/migrations/sqlite3/20211211020303_add_ftx_kline.go new file mode 100644 index 0000000000..4b72d8aba2 --- /dev/null +++ b/pkg/migrations/sqlite3/20211211020303_add_ftx_kline.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddFtxKline, downAddFtxKline) + +} + +func upAddFtxKline(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `ftx_klines`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(10) NOT NULL,\n `start_time` DATETIME(3) NOT NULL,\n `end_time` DATETIME(3) NOT NULL,\n `interval` VARCHAR(3) NOT NULL,\n `symbol` VARCHAR(7) NOT NULL,\n `open` DECIMAL(16, 8) NOT NULL,\n `high` DECIMAL(16, 8) NOT NULL,\n `low` DECIMAL(16, 8) NOT NULL,\n `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `closed` BOOLEAN NOT NULL DEFAULT TRUE,\n `last_trade_id` INT NOT NULL DEFAULT 0,\n `num_trades` INT NOT NULL DEFAULT 0,\n `quote_volume` DECIMAL NOT NULL DEFAULT 0.0,\n `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0,\n `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0\n);") + if err != nil { + return err + } + + return err +} + +func downAddFtxKline(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE ftx_klines;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20211211034818_add_nav_history_details.go b/pkg/migrations/sqlite3/20211211034818_add_nav_history_details.go new file mode 100644 index 0000000000..4dc6eda9ea --- /dev/null +++ b/pkg/migrations/sqlite3/20211211034818_add_nav_history_details.go @@ -0,0 +1,39 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddNavHistoryDetails, downAddNavHistoryDetails) + +} + +func upAddNavHistoryDetails(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
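+	// nav_history_details stores per-currency net asset value snapshots, indexed by (time, currency, exchange).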
+ + _, err = tx.ExecContext(ctx, "CREATE TABLE `nav_history_details`\n(\n `gid` BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,\n `exchange` VARCHAR(30) NOT NULL DEFAULT '',\n `subaccount` VARCHAR(30) NOT NULL DEFAULT '',\n `time` DATETIME(3) NOT NULL DEFAULT (strftime('%s', 'now')),\n `currency` VARCHAR(30) NOT NULL,\n `net_asset_in_usd` DECIMAL DEFAULT 0.00000000 NOT NULL,\n `net_asset_in_btc` DECIMAL DEFAULT 0.00000000 NOT NULL,\n `balance` DECIMAL DEFAULT 0.00000000 NOT NULL,\n `available` DECIMAL DEFAULT 0.00000000 NOT NULL,\n `locked` DECIMAL DEFAULT 0.00000000 NOT NULL\n);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX idx_nav_history_details\n on nav_history_details (time, currency, exchange);") + if err != nil { + return err + } + + return err +} + +func downAddNavHistoryDetails(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE nav_history_details;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20211211103657_update_fee_currency_length.go b/pkg/migrations/sqlite3/20211211103657_update_fee_currency_length.go new file mode 100644 index 0000000000..c9df8bfcb4 --- /dev/null +++ b/pkg/migrations/sqlite3/20211211103657_update_fee_currency_length.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upUpdateFeeCurrencyLength, downUpdateFeeCurrencyLength) + +} + +func upUpdateFeeCurrencyLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downUpdateFeeCurrencyLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20211226022411_add_kucoin_klines.go b/pkg/migrations/sqlite3/20211226022411_add_kucoin_klines.go new file mode 100644 index 0000000000..caf4b96ba9 --- /dev/null +++ b/pkg/migrations/sqlite3/20211226022411_add_kucoin_klines.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddKucoinKlines, downAddKucoinKlines) + +} + +func upAddKucoinKlines(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
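+	// kucoin_klines follows the same schema as the other per-exchange kline tables, including the taker buy volume columns.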
+ + _, err = tx.ExecContext(ctx, "CREATE TABLE `kucoin_klines`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(10) NOT NULL,\n `start_time` DATETIME(3) NOT NULL,\n `end_time` DATETIME(3) NOT NULL,\n `interval` VARCHAR(3) NOT NULL,\n `symbol` VARCHAR(7) NOT NULL,\n `open` DECIMAL(16, 8) NOT NULL,\n `high` DECIMAL(16, 8) NOT NULL,\n `low` DECIMAL(16, 8) NOT NULL,\n `close` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `volume` DECIMAL(16, 8) NOT NULL DEFAULT 0.0,\n `closed` BOOLEAN NOT NULL DEFAULT TRUE,\n `last_trade_id` INT NOT NULL DEFAULT 0,\n `num_trades` INT NOT NULL DEFAULT 0,\n `quote_volume` DECIMAL NOT NULL DEFAULT 0.0,\n `taker_buy_base_volume` DECIMAL NOT NULL DEFAULT 0.0,\n `taker_buy_quote_volume` DECIMAL NOT NULL DEFAULT 0.0\n);") + if err != nil { + return err + } + + return err +} + +func downAddKucoinKlines(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE kucoin_klines;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220304153309_add_profit_table.go b/pkg/migrations/sqlite3/20220304153309_add_profit_table.go new file mode 100644 index 0000000000..b95968be29 --- /dev/null +++ b/pkg/migrations/sqlite3/20220304153309_add_profit_table.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddProfitTable, downAddProfitTable) + +} + +func upAddProfitTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `profits`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `strategy` VARCHAR(32) NOT NULL,\n `strategy_instance_id` VARCHAR(64) NOT NULL,\n `symbol` VARCHAR(8) NOT NULL,\n -- average_cost is the position average cost\n `average_cost` DECIMAL(16, 8) NOT NULL,\n -- profit is the pnl (profit and loss)\n `profit` DECIMAL(16, 8) NOT NULL,\n -- net_profit is the pnl (profit and loss)\n `net_profit` DECIMAL(16, 8) NOT NULL,\n -- profit_margin is the pnl (profit and loss)\n `profit_margin` DECIMAL(16, 8) NOT NULL,\n -- net_profit_margin is the pnl (profit and loss)\n `net_profit_margin` DECIMAL(16, 8) NOT NULL,\n `quote_currency` VARCHAR(10) NOT NULL,\n `base_currency` VARCHAR(10) NOT NULL,\n -- -------------------------------------------------------\n -- embedded trade data --\n -- -------------------------------------------------------\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `is_futures` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_margin` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_isolated` BOOLEAN NOT NULL DEFAULT FALSE,\n `trade_id` BIGINT NOT NULL,\n -- side is the side of the trade that makes profit\n `side` VARCHAR(4) NOT NULL DEFAULT '',\n `is_buyer` BOOLEAN NOT NULL DEFAULT FALSE,\n `is_maker` BOOLEAN NOT NULL DEFAULT FALSE,\n -- price is the price of the trade that makes profit\n `price` DECIMAL(16, 8) NOT NULL,\n -- quantity is the quantity of the trade that makes profit\n `quantity` DECIMAL(16, 8) NOT NULL,\n -- trade_amount is the quote quantity of the trade that makes profit\n `quote_quantity` DECIMAL(16, 8) NOT NULL,\n `traded_at` DATETIME(3) NOT NULL,\n -- fee\n `fee_in_usd` DECIMAL(16, 8),\n `fee` DECIMAL(16, 8) NOT NULL,\n `fee_currency` VARCHAR(10) NOT NULL\n);") + if err != nil { + return err + } + + return err +} + +func downAddProfitTable(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // 
This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `profits`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220307132917_add_positions.go b/pkg/migrations/sqlite3/20220307132917_add_positions.go new file mode 100644 index 0000000000..21c1e757f3 --- /dev/null +++ b/pkg/migrations/sqlite3/20220307132917_add_positions.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddPositions, downAddPositions) + +} + +func upAddPositions(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `positions`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `strategy` VARCHAR(32) NOT NULL,\n `strategy_instance_id` VARCHAR(64) NOT NULL,\n `symbol` VARCHAR(20) NOT NULL,\n `quote_currency` VARCHAR(10) NOT NULL,\n `base_currency` VARCHAR(10) NOT NULL,\n -- average_cost is the position average cost\n `average_cost` DECIMAL(16, 8) NOT NULL,\n `base` DECIMAL(16, 8) NOT NULL,\n `quote` DECIMAL(16, 8) NOT NULL,\n `profit` DECIMAL(16, 8) NULL,\n -- trade related columns\n `trade_id` BIGINT NOT NULL,\n `side` VARCHAR(4) NOT NULL, -- side of the trade\n `exchange` VARCHAR(12) NOT NULL, -- exchange of the trade\n `traded_at` DATETIME(3) NOT NULL\n);") + if err != nil { + return err + } + + return err +} + +func downAddPositions(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `positions`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220317125555_fix_trade_indexes.go b/pkg/migrations/sqlite3/20220317125555_fix_trade_indexes.go new file mode 100644 index 0000000000..1e68af6998 --- /dev/null +++ b/pkg/migrations/sqlite3/20220317125555_fix_trade_indexes.go @@ -0,0 +1,84 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixTradeIndexes, downFixTradeIndexes) + +} + +func upFixTradeIndexes(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_symbol;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_symbol_fee_currency;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_traded_at_symbol;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_traded_at ON trades (traded_at, symbol, exchange, id, fee_currency, fee);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_id_traded_at ON trades (id, traded_at);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_order_id_traded_at ON trades (order_id, traded_at);") + if err != nil { + return err + } + + return err +} + +func downFixTradeIndexes(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
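+	// Drop the traded_at based indexes and recreate the original symbol based indexes.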
+ + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_traded_at;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_id_traded_at;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX IF EXISTS trades_order_id_traded_at;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_symbol ON trades (exchange, symbol);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_symbol_fee_currency ON trades (exchange, symbol, fee_currency, traded_at);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE INDEX trades_traded_at_symbol ON trades (exchange, traded_at, symbol);") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220419121046_fix_fee_column.go b/pkg/migrations/sqlite3/20220419121046_fix_fee_column.go new file mode 100644 index 0000000000..788f34b87c --- /dev/null +++ b/pkg/migrations/sqlite3/20220419121046_fix_fee_column.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixFeeColumn, downFixFeeColumn) + +} + +func upFixFeeColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downFixFeeColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220503144849_add_margin_info_to_nav.go b/pkg/migrations/sqlite3/20220503144849_add_margin_info_to_nav.go new file mode 100644 index 0000000000..849e7e19ac --- /dev/null +++ b/pkg/migrations/sqlite3/20220503144849_add_margin_info_to_nav.go @@ -0,0 +1,64 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upAddMarginInfoToNav, downAddMarginInfoToNav) + +} + +func upAddMarginInfoToNav(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
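+	// Extend nav_history_details with the session name and the margin related columns.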
+ + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details` ADD COLUMN `session` VARCHAR(50) NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details` ADD COLUMN `borrowed` DECIMAL DEFAULT 0.00000000 NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details` ADD COLUMN `net_asset` DECIMAL DEFAULT 0.00000000 NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details` ADD COLUMN `price_in_usd` DECIMAL DEFAULT 0.00000000 NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details` ADD COLUMN `is_margin` BOOL DEFAULT FALSE NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details` ADD COLUMN `is_isolated` BOOL DEFAULT FALSE NOT NULL;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details` ADD COLUMN `isolated_symbol` VARCHAR(30) DEFAULT '' NOT NULL;") + if err != nil { + return err + } + + return err +} + +func downAddMarginInfoToNav(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220504184155_fix_net_asset_column.go b/pkg/migrations/sqlite3/20220504184155_fix_net_asset_column.go new file mode 100644 index 0000000000..d398ad5272 --- /dev/null +++ b/pkg/migrations/sqlite3/20220504184155_fix_net_asset_column.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixNetAssetColumn, downFixNetAssetColumn) + +} + +func upFixNetAssetColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "ALTER TABLE `nav_history_details` ADD COLUMN `interest` DECIMAL DEFAULT 0.00000000 NOT NULL;") + if err != nil { + return err + } + + return err +} + +func downFixNetAssetColumn(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220512170330_fix_profit_symbol_length.go b/pkg/migrations/sqlite3/20220512170330_fix_profit_symbol_length.go new file mode 100644 index 0000000000..31b3783607 --- /dev/null +++ b/pkg/migrations/sqlite3/20220512170330_fix_profit_symbol_length.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upFixProfitSymbolLength, downFixProfitSymbolLength) + +} + +func upFixProfitSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} + +func downFixProfitSymbolLength(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
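+	// Placeholder migration: both directions only run SELECT 1, so no schema change is applied for SQLite.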
+ + _, err = tx.ExecContext(ctx, "SELECT 1;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220520140707_kline_unique_idx.go b/pkg/migrations/sqlite3/20220520140707_kline_unique_idx.go new file mode 100644 index 0000000000..605187154f --- /dev/null +++ b/pkg/migrations/sqlite3/20220520140707_kline_unique_idx.go @@ -0,0 +1,74 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upKlineUniqueIdx, downKlineUniqueIdx) + +} + +func upKlineUniqueIdx(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX idx_kline_binance_unique\n ON binance_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX idx_kline_max_unique\n ON max_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `idx_kline_ftx_unique`\n ON ftx_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `idx_kline_kucoin_unique`\n ON kucoin_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "CREATE UNIQUE INDEX `idx_kline_okex_unique`\n ON okex_klines (`symbol`, `interval`, `start_time`);") + if err != nil { + return err + } + + return err +} + +func downKlineUniqueIdx(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_ftx_unique` ON `ftx_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_max_unique` ON `max_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_binance_unique` ON `binance_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_kucoin_unique` ON `kucoin_klines`;") + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, "DROP INDEX `idx_kline_okex_unique` ON `okex_klines`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220531012226_margin_loans.go b/pkg/migrations/sqlite3/20220531012226_margin_loans.go new file mode 100644 index 0000000000..25bfc68b98 --- /dev/null +++ b/pkg/migrations/sqlite3/20220531012226_margin_loans.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upMarginLoans, downMarginLoans) + +} + +func upMarginLoans(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `margin_loans`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `transaction_id` INTEGER NOT NULL,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `asset` VARCHAR(24) NOT NULL DEFAULT '',\n `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '',\n -- quantity is the quantity of the trade that makes profit\n `principle` DECIMAL(16, 8) NOT NULL,\n `time` DATETIME(3) NOT NULL\n);") + if err != nil { + return err + } + + return err +} + +func downMarginLoans(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. 
+ + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `margin_loans`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220531013327_margin_repays.go b/pkg/migrations/sqlite3/20220531013327_margin_repays.go new file mode 100644 index 0000000000..d915643220 --- /dev/null +++ b/pkg/migrations/sqlite3/20220531013327_margin_repays.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upMarginRepays, downMarginRepays) + +} + +func upMarginRepays(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `margin_repays`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `transaction_id` INTEGER NOT NULL,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `asset` VARCHAR(24) NOT NULL DEFAULT '',\n `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '',\n -- quantity is the quantity of the trade that makes profit\n `principle` DECIMAL(16, 8) NOT NULL,\n `time` DATETIME(3) NOT NULL\n);") + if err != nil { + return err + } + + return err +} + +func downMarginRepays(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `margin_repays`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220531013541_margin_interests.go b/pkg/migrations/sqlite3/20220531013541_margin_interests.go new file mode 100644 index 0000000000..0c06a2ce62 --- /dev/null +++ b/pkg/migrations/sqlite3/20220531013541_margin_interests.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upMarginInterests, downMarginInterests) + +} + +func upMarginInterests(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. + + _, err = tx.ExecContext(ctx, "CREATE TABLE `margin_interests`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `asset` VARCHAR(24) NOT NULL DEFAULT '',\n `isolated_symbol` VARCHAR(24) NOT NULL DEFAULT '',\n `principle` DECIMAL(16, 8) NOT NULL,\n `interest` DECIMAL(20, 16) NOT NULL,\n `interest_rate` DECIMAL(20, 16) NOT NULL,\n `time` DATETIME(3) NOT NULL\n);") + if err != nil { + return err + } + + return err +} + +func downMarginInterests(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `margin_interests`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/20220531015005_margin_liquidations.go b/pkg/migrations/sqlite3/20220531015005_margin_liquidations.go new file mode 100644 index 0000000000..5f1e07f76e --- /dev/null +++ b/pkg/migrations/sqlite3/20220531015005_margin_liquidations.go @@ -0,0 +1,34 @@ +package sqlite3 + +import ( + "context" + + "github.com/c9s/rockhopper" +) + +func init() { + AddMigration(upMarginLiquidations, downMarginLiquidations) + +} + +func upMarginLiquidations(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is applied. 
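+	// margin_liquidations keeps the liquidation order records (price, quantity, side) reported for margin accounts.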
+ + _, err = tx.ExecContext(ctx, "CREATE TABLE `margin_liquidations`\n(\n `gid` INTEGER PRIMARY KEY AUTOINCREMENT,\n `exchange` VARCHAR(24) NOT NULL DEFAULT '',\n `symbol` VARCHAR(24) NOT NULL DEFAULT '',\n `order_id` INTEGER NOT NULL,\n `is_isolated` BOOL NOT NULL DEFAULT false,\n `average_price` DECIMAL(16, 8) NOT NULL,\n `price` DECIMAL(16, 8) NOT NULL,\n `quantity` DECIMAL(16, 8) NOT NULL,\n `executed_quantity` DECIMAL(16, 8) NOT NULL,\n `side` VARCHAR(5) NOT NULL DEFAULT '',\n `time_in_force` VARCHAR(5) NOT NULL DEFAULT '',\n `time` DATETIME(3) NOT NULL\n);") + if err != nil { + return err + } + + return err +} + +func downMarginLiquidations(ctx context.Context, tx rockhopper.SQLExecutor) (err error) { + // This code is executed when the migration is rolled back. + + _, err = tx.ExecContext(ctx, "DROP TABLE IF EXISTS `margin_liquidations`;") + if err != nil { + return err + } + + return err +} diff --git a/pkg/migrations/sqlite3/migration_api.go b/pkg/migrations/sqlite3/migration_api.go new file mode 100644 index 0000000000..eecc54695a --- /dev/null +++ b/pkg/migrations/sqlite3/migration_api.go @@ -0,0 +1,87 @@ +package sqlite3 + +import ( + "fmt" + "log" + "runtime" + "strings" + + "github.com/c9s/rockhopper" +) + +var registeredGoMigrations map[int64]*rockhopper.Migration + +func MergeMigrationsMap(ms map[int64]*rockhopper.Migration) { + for k, m := range ms { + if _, ok := registeredGoMigrations[k]; !ok { + registeredGoMigrations[k] = m + } else { + log.Printf("the migration key %d is duplicated: %+v", k, m) + } + } +} + +func GetMigrationsMap() map[int64]*rockhopper.Migration { + return registeredGoMigrations +} + +// SortedMigrations builds up the migration objects, sort them by timestamp and return as a slice +func SortedMigrations() rockhopper.MigrationSlice { + return Migrations() +} + +// Migrations builds up the migration objects, sort them by timestamp and return as a slice +func Migrations() rockhopper.MigrationSlice { + var migrations = rockhopper.MigrationSlice{} + for _, migration := range registeredGoMigrations { + migrations = append(migrations, migration) + } + + return migrations.SortAndConnect() +} + +// AddMigration adds a migration with its runtime caller information +func AddMigration(up, down rockhopper.TransactionHandler) { + pc, filename, _, _ := runtime.Caller(1) + + funcName := runtime.FuncForPC(pc).Name() + packageName := _parseFuncPackageName(funcName) + AddNamedMigration(packageName, filename, up, down) +} + +// parseFuncPackageName parses the package name from a given runtime caller function name +func _parseFuncPackageName(funcName string) string { + lastSlash := strings.LastIndexByte(funcName, '/') + if lastSlash < 0 { + lastSlash = 0 + } + + lastDot := strings.LastIndexByte(funcName[lastSlash:], '.') + lastSlash + packageName := funcName[:lastDot] + return packageName +} + +// AddNamedMigration adds a named migration to the registered go migration map +func AddNamedMigration(packageName, filename string, up, down rockhopper.TransactionHandler) { + if registeredGoMigrations == nil { + registeredGoMigrations = make(map[int64]*rockhopper.Migration) + } + + v, _ := rockhopper.FileNumericComponent(filename) + + migration := &rockhopper.Migration{ + Package: packageName, + Registered: true, + + Version: v, + UpFn: up, + DownFn: down, + Source: filename, + UseTx: true, + } + + if existing, ok := registeredGoMigrations[v]; ok { + panic(fmt.Sprintf("failed to add migration %q: version conflicts with %q", filename, existing.Source)) + } + 
registeredGoMigrations[v] = migration +} diff --git a/pkg/migrations/sqlite3/migration_api_test.go b/pkg/migrations/sqlite3/migration_api_test.go new file mode 100644 index 0000000000..d7f77c875c --- /dev/null +++ b/pkg/migrations/sqlite3/migration_api_test.go @@ -0,0 +1,20 @@ +package sqlite3 + +import ( + "testing" + + "github.com/c9s/rockhopper" + "github.com/stretchr/testify/assert" +) + +func TestGetMigrationsMap(t *testing.T) { + mm := GetMigrationsMap() + assert.NotEmpty(t, mm) +} + +func TestMergeMigrationsMap(t *testing.T) { + MergeMigrationsMap(map[int64]*rockhopper.Migration{ + 2: {}, + 3: {}, + }) +} diff --git a/pkg/net/websocketbase/client.go b/pkg/net/websocketbase/client.go new file mode 100644 index 0000000000..0754777f15 --- /dev/null +++ b/pkg/net/websocketbase/client.go @@ -0,0 +1,100 @@ +package websocketbase + +import ( + "context" + "sync" + "time" + + "github.com/gorilla/websocket" +) + +// WebsocketClientBase is a legacy base client +// Deprecated: please use standard stream instead. +//go:generate callbackgen -type WebsocketClientBase +type WebsocketClientBase struct { + baseURL string + + // mu protects conn + mu sync.Mutex + conn *websocket.Conn + reconnectC chan struct{} + reconnectDuration time.Duration + + connectedCallbacks []func(conn *websocket.Conn) + disconnectedCallbacks []func(conn *websocket.Conn) + messageCallbacks []func(message []byte) + errorCallbacks []func(err error) +} + +func NewWebsocketClientBase(baseURL string, reconnectDuration time.Duration) *WebsocketClientBase { + return &WebsocketClientBase{ + baseURL: baseURL, + reconnectC: make(chan struct{}, 1), + reconnectDuration: reconnectDuration, + } +} + +func (s *WebsocketClientBase) Listen(ctx context.Context) { + for { + select { + case <-ctx.Done(): + return + case <-s.reconnectC: + time.Sleep(s.reconnectDuration) + if err := s.connect(ctx); err != nil { + s.Reconnect() + } + default: + conn := s.Conn() + mt, msg, err := conn.ReadMessage() + + if err != nil { + s.Reconnect() + continue + } + + if mt != websocket.TextMessage { + continue + } + + s.EmitMessage(msg) + } + } +} + +func (s *WebsocketClientBase) Connect(ctx context.Context) error { + if err := s.connect(ctx); err != nil { + return err + } + go s.Listen(ctx) + return nil +} + +func (s *WebsocketClientBase) Reconnect() { + select { + case s.reconnectC <- struct{}{}: + default: + } +} + +func (s *WebsocketClientBase) connect(ctx context.Context) error { + dialer := websocket.DefaultDialer + conn, _, err := dialer.DialContext(ctx, s.baseURL, nil) + if err != nil { + return err + } + + s.mu.Lock() + s.conn = conn + s.mu.Unlock() + + s.EmitConnected(conn) + + return nil +} + +func (s *WebsocketClientBase) Conn() *websocket.Conn { + s.mu.Lock() + defer s.mu.Unlock() + return s.conn +} diff --git a/pkg/net/websocketbase/websocketclientbase_callbacks.go b/pkg/net/websocketbase/websocketclientbase_callbacks.go new file mode 100644 index 0000000000..4445357854 --- /dev/null +++ b/pkg/net/websocketbase/websocketclientbase_callbacks.go @@ -0,0 +1,47 @@ +// Code generated by "callbackgen -type WebsocketClientBase"; DO NOT EDIT. 
+ +package websocketbase + +import ( + "github.com/gorilla/websocket" +) + +func (s *WebsocketClientBase) OnConnected(cb func(conn *websocket.Conn)) { + s.connectedCallbacks = append(s.connectedCallbacks, cb) +} + +func (s *WebsocketClientBase) EmitConnected(conn *websocket.Conn) { + for _, cb := range s.connectedCallbacks { + cb(conn) + } +} + +func (s *WebsocketClientBase) OnDisconnected(cb func(conn *websocket.Conn)) { + s.disconnectedCallbacks = append(s.disconnectedCallbacks, cb) +} + +func (s *WebsocketClientBase) EmitDisconnected(conn *websocket.Conn) { + for _, cb := range s.disconnectedCallbacks { + cb(conn) + } +} + +func (s *WebsocketClientBase) OnMessage(cb func(message []byte)) { + s.messageCallbacks = append(s.messageCallbacks, cb) +} + +func (s *WebsocketClientBase) EmitMessage(message []byte) { + for _, cb := range s.messageCallbacks { + cb(message) + } +} + +func (s *WebsocketClientBase) OnError(cb func(err error)) { + s.errorCallbacks = append(s.errorCallbacks, cb) +} + +func (s *WebsocketClientBase) EmitError(err error) { + for _, cb := range s.errorCallbacks { + cb(err) + } +} diff --git a/pkg/notifier/slacknotifier/slack.go b/pkg/notifier/slacknotifier/slack.go index 59e062e4e6..69e2292574 100644 --- a/pkg/notifier/slacknotifier/slack.go +++ b/pkg/notifier/slacknotifier/slack.go @@ -3,86 +3,151 @@ package slacknotifier import ( "context" "fmt" + "time" + + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/types" log "github.com/sirupsen/logrus" "github.com/slack-go/slack" ) -type SlackAttachmentCreator interface { +var limiter = rate.NewLimiter(rate.Every(1*time.Second), 1) + +type notifyTask struct { + Channel string + Opts []slack.MsgOption +} + +type slackAttachmentCreator interface { SlackAttachment() slack.Attachment } type Notifier struct { client *slack.Client channel string + + taskC chan notifyTask } type NotifyOption func(notifier *Notifier) -func New(token, channel string, options ...NotifyOption) *Notifier { - // var client = slack.New(token, slack.OptionDebug(true)) - var client = slack.New(token) - +func New(client *slack.Client, channel string, options ...NotifyOption) *Notifier { notifier := &Notifier{ channel: channel, client: client, + taskC: make(chan notifyTask, 100), } for _, o := range options { o(notifier) } - return notifier -} + go notifier.worker() -func (n *Notifier) Notify(format string, args ...interface{}) { - n.NotifyTo(n.channel, format, args...) + return notifier } -func (n *Notifier) NotifyTo(channel, format string, args ...interface{}) { - if len(channel) == 0 { - channel = n.channel +func (n *Notifier) worker() { + ctx := context.Background() + for { + select { + case <-ctx.Done(): + return + + case task := <-n.taskC: + limiter.Wait(ctx) + _, _, err := n.client.PostMessageContext(ctx, task.Channel, task.Opts...) + if err != nil { + log.WithError(err). + WithField("channel", task.Channel). + Errorf("slack api error: %s", err.Error()) + } + } } +} - var slackAttachments []slack.Attachment - var slackArgsOffset = -1 +func (n *Notifier) Notify(obj interface{}, args ...interface{}) { + n.NotifyTo(n.channel, obj, args...) 
+} +func filterSlackAttachments(args []interface{}) (slackAttachments []slack.Attachment, pureArgs []interface{}) { + var firstAttachmentOffset = -1 for idx, arg := range args { switch a := arg.(type) { // concrete type assert first case slack.Attachment: - if slackArgsOffset == -1 { - slackArgsOffset = idx + if firstAttachmentOffset == -1 { + firstAttachmentOffset = idx } slackAttachments = append(slackAttachments, a) - case SlackAttachmentCreator: - if slackArgsOffset == -1 { - slackArgsOffset = idx + case slackAttachmentCreator: + if firstAttachmentOffset == -1 { + firstAttachmentOffset = idx } slackAttachments = append(slackAttachments, a.SlackAttachment()) + case types.PlainText: + if firstAttachmentOffset == -1 { + firstAttachmentOffset = idx + } + + // fallback to PlainText if it's not supported + // convert plain text to slack attachment + text := a.PlainText() + slackAttachments = append(slackAttachments, slack.Attachment{ + Title: text, + }) } } - var nonSlackArgs = args - if slackArgsOffset > -1 { - nonSlackArgs = args[:slackArgsOffset] + pureArgs = args + if firstAttachmentOffset > -1 { + pureArgs = args[:firstAttachmentOffset] } - _, _, err := n.client.PostMessageContext(context.Background(), channel, - slack.MsgOptionText(fmt.Sprintf(format, nonSlackArgs...), true), - slack.MsgOptionAttachments(slackAttachments...)) - if err != nil { - log.WithError(err). - WithField("channel", channel). - Errorf("slack error: %s", err.Error()) + return slackAttachments, pureArgs +} + +func (n *Notifier) NotifyTo(channel string, obj interface{}, args ...interface{}) { + if len(channel) == 0 { + channel = n.channel + } + + slackAttachments, pureArgs := filterSlackAttachments(args) + + var opts []slack.MsgOption + + switch a := obj.(type) { + case string: + opts = append(opts, slack.MsgOptionText(fmt.Sprintf(a, pureArgs...), true), + slack.MsgOptionAttachments(slackAttachments...)) + + case slack.Attachment: + opts = append(opts, slack.MsgOptionAttachments(append([]slack.Attachment{a}, slackAttachments...)...)) + + case slackAttachmentCreator: + // convert object to slack attachment (if supported) + opts = append(opts, slack.MsgOptionAttachments(append([]slack.Attachment{a.SlackAttachment()}, slackAttachments...)...)) + + default: + log.Errorf("slack message conversion error, unsupported object: %T %+v", a, a) + } - return + select { + case n.taskC <- notifyTask{ + Channel: channel, + Opts: opts, + }: + case <-time.After(50 * time.Millisecond): + return + } } /* diff --git a/pkg/notifier/telegramnotifier/interaction.go b/pkg/notifier/telegramnotifier/interaction.go deleted file mode 100644 index dc00e20b5b..0000000000 --- a/pkg/notifier/telegramnotifier/interaction.go +++ /dev/null @@ -1,152 +0,0 @@ -package telegramnotifier - -import ( - "fmt" - - "github.com/pquerna/otp" - "github.com/pquerna/otp/totp" - "github.com/sirupsen/logrus" - "gopkg.in/tucnak/telebot.v2" - - "github.com/c9s/bbgo/pkg/bbgo" -) - -var log = logrus.WithField("service", "telegram") - -type Session struct { - Owner *telebot.User `json:"owner"` - OneTimePasswordKey *otp.Key `json:"otpKey"` -} - -func NewSession(key *otp.Key) Session { - return Session{ - Owner: nil, - OneTimePasswordKey: key, - } -} - -//go:generate callbackgen -type Interaction -type Interaction struct { - store bbgo.Store - - bot *telebot.Bot - - AuthToken string - - session *Session - - StartCallbacks []func() - AuthCallbacks []func(user *telebot.User) -} - -func NewInteraction(bot *telebot.Bot, store bbgo.Store) *Interaction { - interaction := 
&Interaction{ - store: store, - bot: bot, - } - - bot.Handle("/help", interaction.HandleHelp) - bot.Handle("/auth", interaction.HandleAuth) - bot.Handle("/info", interaction.HandleInfo) - return interaction -} - -func (it *Interaction) SetAuthToken(token string) { - it.AuthToken = token -} - -func (it *Interaction) Session() *Session { - return it.session -} - -func (it *Interaction) HandleInfo(m *telebot.Message) { - if it.session.Owner == nil { - return - } - - if m.Sender.ID != it.session.Owner.ID { - log.Warningf("incorrect user tried to access bot! sender: %+v", m.Sender) - } else { - if _, err := it.bot.Send(it.session.Owner, - fmt.Sprintf("Welcome! your username: %s, user ID: %d", - it.session.Owner.Username, - it.session.Owner.ID, - )); err != nil { - log.WithError(err).Error("failed to send telegram message") - } - } -} - -func (it *Interaction) SendToOwner(message string) { - if it.session.Owner == nil { - return - } - - if _, err := it.bot.Send(it.session.Owner, message); err != nil { - log.WithError(err).Error("failed to send message to the owner") - } -} - -func (it *Interaction) HandleHelp(m *telebot.Message) { - message := ` -help - show this help message -auth - authorize current telegram user to access telegram bot with authentication token or one-time password. ex. /auth my-token -info - show information about current chat -` - if _, err := it.bot.Send(m.Sender, message); err != nil { - log.WithError(err).Error("failed to send help message") - } -} - -func (it *Interaction) HandleAuth(m *telebot.Message) { - if len(it.AuthToken) > 0 && m.Payload == it.AuthToken { - it.session.Owner = m.Sender - if _, err := it.bot.Send(m.Sender, fmt.Sprintf("Hi %s, I know you, I will send you the notifications!", m.Sender.Username)); err != nil { - log.WithError(err).Error("telegram send error") - } - - if err := it.store.Save(it.session); err != nil { - log.WithError(err).Error("can not persist telegram chat user") - } - - it.EmitAuth(m.Sender) - - } else if it.session != nil && it.session.OneTimePasswordKey != nil { - - if totp.Validate(m.Payload, it.session.OneTimePasswordKey.Secret()) { - it.session.Owner = m.Sender - - if _, err := it.bot.Send(m.Sender, fmt.Sprintf("Hi %s, I know you, I will send you the notifications!", m.Sender.Username)); err != nil { - log.WithError(err).Error("telegram send error") - } - - if err := it.store.Save(it.session); err != nil { - log.WithError(err).Error("can not persist telegram chat user") - } - - it.EmitAuth(m.Sender) - - } else { - if _, err := it.bot.Send(m.Sender, "Authorization failed. please check your auth token"); err != nil { - log.WithError(err).Error("telegram send error") - } - } - - } else { - if _, err := it.bot.Send(m.Sender, "Authorization failed. please check your auth token"); err != nil { - log.WithError(err).Error("telegram send error") - } - } -} - -func (it *Interaction) Start(session Session) { - it.session = &session - - if it.session.Owner != nil { - if _, err := it.bot.Send(it.session.Owner, fmt.Sprintf("Hi %s, I'm back", it.session.Owner.Username)); err != nil { - log.WithError(err).Error("failed to send telegram message") - } - } - - it.bot.Start() -} diff --git a/pkg/notifier/telegramnotifier/interaction_callbacks.go b/pkg/notifier/telegramnotifier/interaction_callbacks.go deleted file mode 100644 index 7a82c45afd..0000000000 --- a/pkg/notifier/telegramnotifier/interaction_callbacks.go +++ /dev/null @@ -1,17 +0,0 @@ -// Code generated by "callbackgen -type Interaction"; DO NOT EDIT. 
- -package telegramnotifier - -import ( - "gopkg.in/tucnak/telebot.v2" -) - -func (it *Interaction) OnAuth(cb func(user *telebot.User)) { - it.AuthCallbacks = append(it.AuthCallbacks, cb) -} - -func (it *Interaction) EmitAuth(user *telebot.User) { - for _, cb := range it.AuthCallbacks { - cb(user) - } -} diff --git a/pkg/notifier/telegramnotifier/telegram.go b/pkg/notifier/telegramnotifier/telegram.go index feb431b788..36927319e0 100644 --- a/pkg/notifier/telegramnotifier/telegram.go +++ b/pkg/notifier/telegramnotifier/telegram.go @@ -2,20 +2,44 @@ package telegramnotifier import ( "fmt" + "reflect" + "strconv" + "time" + + "github.com/sirupsen/logrus" + "gopkg.in/tucnak/telebot.v2" "github.com/c9s/bbgo/pkg/types" ) +var log = logrus.WithField("service", "telegram") + type Notifier struct { - interaction *Interaction + bot *telebot.Bot + + // Subscribers stores the Chat objects for broadcasting public notification + Subscribers map[int64]time.Time `json:"subscribers"` + + // Chats are the private chats that we will send private notification + Chats map[int64]*telebot.Chat `json:"chats"` + + broadcast bool } -type NotifyOption func(notifier *Notifier) +type Option func(notifier *Notifier) + +func UseBroadcast() Option { + return func(notifier *Notifier) { + notifier.broadcast = true + } +} -// start bot daemon -func New(interaction *Interaction, options ...NotifyOption) *Notifier { +// New +func New(bot *telebot.Bot, options ...Option) *Notifier { notifier := &Notifier{ - interaction: interaction, + bot: bot, + Chats: make(map[int64]*telebot.Chat), + Subscribers: make(map[int64]time.Time), } for _, o := range options { @@ -25,36 +49,115 @@ func New(interaction *Interaction, options ...NotifyOption) *Notifier { return notifier } -func (n *Notifier) Notify(format string, args ...interface{}) { - n.NotifyTo("", format, args...) +func (n *Notifier) Notify(obj interface{}, args ...interface{}) { + n.NotifyTo("", obj, args...) } -func (n *Notifier) NotifyTo(_, format string, args ...interface{}) { - var textArgsOffset = -1 - var texts []string - +func filterPlaintextMessages(args []interface{}) (texts []string, pureArgs []interface{}) { + var firstObjectOffset = -1 for idx, arg := range args { - switch a := arg.(type) { + rt := reflect.TypeOf(arg) + if rt.Kind() == reflect.Ptr { + switch a := arg.(type) { + + case nil: + texts = append(texts, "nil") + if firstObjectOffset == -1 { + firstObjectOffset = idx + } + + case types.PlainText: + texts = append(texts, a.PlainText()) + if firstObjectOffset == -1 { + firstObjectOffset = idx + } + + case types.Stringer: + texts = append(texts, a.String()) + if firstObjectOffset == -1 { + firstObjectOffset = idx + } + } + } + } + + pureArgs = args + if firstObjectOffset > -1 { + pureArgs = args[:firstObjectOffset] + } + + return texts, pureArgs +} + +func (n *Notifier) NotifyTo(channel string, obj interface{}, args ...interface{}) { + var texts, pureArgs = filterPlaintextMessages(args) + var message string - case types.PlainText: - texts = append(texts, a.PlainText()) - textArgsOffset = idx + switch a := obj.(type) { + case string: + message = fmt.Sprintf(a, pureArgs...) 
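+	// Objects other than plain strings are rendered through their PlainText or Stringer implementations below.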
+ + case types.PlainText: + message = a.PlainText() + + case types.Stringer: + message = a.String() + + default: + log.Errorf("unsupported notification format: %T %+v", a, a) + + } + + if n.broadcast { + n.Broadcast(message) + for _, text := range texts { + n.Broadcast(text) + } + } else if n.Chats != nil { + for _, chat := range n.Chats { + if _, err := n.bot.Send(chat, message); err != nil { + log.WithError(err).Error("telegram send error") + } + + for _, text := range texts { + if _, err := n.bot.Send(chat, text); err != nil { + log.WithError(err).Error("telegram send error") + } + } } } +} - var simpleArgs = args - if textArgsOffset > -1 { - simpleArgs = args[:textArgsOffset] +func (n *Notifier) AddChat(c *telebot.Chat) { + if n.Chats == nil { + n.Chats = make(map[int64]*telebot.Chat) } + n.Chats[c.ID] = c +} - log.Infof(format, simpleArgs...) +func (n *Notifier) AddSubscriber(m *telebot.Message) { + if n.Subscribers == nil { + n.Subscribers = make(map[int64]time.Time) + } - message := fmt.Sprintf(format, simpleArgs...) - n.interaction.SendToOwner(message) + n.Subscribers[m.Chat.ID] = m.Time() +} - for _, text := range texts { - n.interaction.SendToOwner(text) +func (n *Notifier) Broadcast(message string) { + if n.Subscribers == nil { + return } + for chatID := range n.Subscribers { + chat, err := n.bot.ChatByID(strconv.FormatInt(chatID, 10)) + if err != nil { + log.WithError(err).Error("can not get chat by ID") + continue + } + + if _, err := n.bot.Send(chat, message); err != nil { + log.WithError(err).Error("failed to send message") + } + } } diff --git a/pkg/optimizer/config.go b/pkg/optimizer/config.go new file mode 100644 index 0000000000..ceedfed5a8 --- /dev/null +++ b/pkg/optimizer/config.go @@ -0,0 +1,69 @@ +package optimizer + +import ( + "io/ioutil" + + "gopkg.in/yaml.v3" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type SelectorConfig struct { + Type string `json:"type" yaml:"type"` + Label string `json:"label,omitempty" yaml:"label,omitempty"` + Path string `json:"path" yaml:"path"` + Values []string `json:"values,omitempty" yaml:"values,omitempty"` + Min fixedpoint.Value `json:"min,omitempty" yaml:"min,omitempty"` + Max fixedpoint.Value `json:"max,omitempty" yaml:"max,omitempty"` + Step fixedpoint.Value `json:"step,omitempty" yaml:"step,omitempty"` +} + +type LocalExecutorConfig struct { + MaxNumberOfProcesses int `json:"maxNumberOfProcesses" yaml:"maxNumberOfProcesses"` +} + +type ExecutorConfig struct { + Type string `json:"type" yaml:"type"` + LocalExecutorConfig *LocalExecutorConfig `json:"local" yaml:"local"` +} + +type Config struct { + Executor *ExecutorConfig `json:"executor" yaml:"executor"` + MaxThread int `yaml:"maxThread,omitempty"` + Matrix []SelectorConfig `yaml:"matrix"` +} + +var defaultExecutorConfig = &ExecutorConfig{ + Type: "local", + LocalExecutorConfig: defaultLocalExecutorConfig, +} + +var defaultLocalExecutorConfig = &LocalExecutorConfig{ + MaxNumberOfProcesses: 10, +} + +func LoadConfig(yamlConfigFileName string) (*Config, error) { + configYaml, err := ioutil.ReadFile(yamlConfigFileName) + if err != nil { + return nil, err + } + + var optConfig Config + if err := yaml.Unmarshal(configYaml, &optConfig); err != nil { + return nil, err + } + + if optConfig.Executor == nil { + optConfig.Executor = defaultExecutorConfig + } + + if optConfig.Executor.Type == "" { + optConfig.Executor.Type = "local" + } + + if optConfig.Executor.Type == "local" && optConfig.Executor.LocalExecutorConfig == nil { + optConfig.Executor.LocalExecutorConfig = 
defaultLocalExecutorConfig + } + + return &optConfig, nil +} diff --git a/pkg/optimizer/grid.go b/pkg/optimizer/grid.go new file mode 100644 index 0000000000..c517b2a705 --- /dev/null +++ b/pkg/optimizer/grid.go @@ -0,0 +1,245 @@ +package optimizer + +import ( + "context" + "encoding/json" + "fmt" + "sort" + + "github.com/evanphx/json-patch/v5" + + "github.com/c9s/bbgo/pkg/backtest" + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type MetricValueFunc func(summaryReport *backtest.SummaryReport) fixedpoint.Value + +var TotalProfitMetricValueFunc = func(summaryReport *backtest.SummaryReport) fixedpoint.Value { + return summaryReport.TotalProfit +} + +type Metric struct { + Labels []string `json:"labels,omitempty"` + Params []interface{} `json:"params,omitempty"` + Value fixedpoint.Value `json:"value,omitempty"` +} + +func copyParams(params []interface{}) []interface{} { + var c = make([]interface{}, len(params)) + copy(c, params) + return c +} + +func copyLabels(labels []string) []string { + var c = make([]string, len(labels)) + copy(c, labels) + return c +} + +type GridOptimizer struct { + Config *Config + + ParamLabels []string + CurrentParams []interface{} +} + +func (o *GridOptimizer) buildOps() []OpFunc { + var ops []OpFunc + + o.CurrentParams = make([]interface{}, len(o.Config.Matrix)) + o.ParamLabels = make([]string, len(o.Config.Matrix)) + + for i, selector := range o.Config.Matrix { + var path = selector.Path + var ii = i // copy variable because we need to use them in the closure + + if selector.Label != "" { + o.ParamLabels[ii] = selector.Label + } else { + o.ParamLabels[ii] = selector.Path + } + + switch selector.Type { + case "range": + min := selector.Min + max := selector.Max + step := selector.Step + if step.IsZero() { + step = fixedpoint.One + } + + var values []fixedpoint.Value + for val := min; val.Compare(max) <= 0; val = val.Add(step) { + values = append(values, val) + } + + f := func(configJson []byte, next func(configJson []byte) error) error { + for _, val := range values { + jsonOp := []byte(reformatJson(fmt.Sprintf(`[{"op": "replace", "path": "%s", "value": %v }]`, path, val))) + patch, err := jsonpatch.DecodePatch(jsonOp) + if err != nil { + return err + } + + log.Debugf("json op: %s", jsonOp) + + patchedJson, err := patch.ApplyIndent(configJson, " ") + if err != nil { + return err + } + + valCopy := val + o.CurrentParams[ii] = valCopy + if err := next(patchedJson); err != nil { + return err + } + } + + return nil + } + ops = append(ops, f) + + case "iterate": + values := selector.Values + f := func(configJson []byte, next func(configJson []byte) error) error { + for _, val := range values { + log.Debugf("%d %s: %v of %v", ii, path, val, values) + + jsonOp := []byte(reformatJson(fmt.Sprintf(`[{"op": "replace", "path": "%s", "value": "%s"}]`, path, val))) + patch, err := jsonpatch.DecodePatch(jsonOp) + if err != nil { + return err + } + + log.Debugf("json op: %s", jsonOp) + + patchedJson, err := patch.ApplyIndent(configJson, " ") + if err != nil { + return err + } + + valCopy := val + o.CurrentParams[ii] = valCopy + if err := next(patchedJson); err != nil { + return err + } + } + + return nil + } + ops = append(ops, f) + case "bool": + values := []bool{true, false} + f := func(configJson []byte, next func(configJson []byte) error) error { + for _, val := range values { + log.Debugf("%d %s: %v of %v", ii, path, val, values) + + jsonOp := []byte(reformatJson(fmt.Sprintf(`[{"op": "replace", "path": "%s", "value": %v}]`, path, val))) + patch, err := 
jsonpatch.DecodePatch(jsonOp) + if err != nil { + return err + } + + log.Debugf("json op: %s", jsonOp) + + patchedJson, err := patch.ApplyIndent(configJson, " ") + if err != nil { + return err + } + + valCopy := val + o.CurrentParams[ii] = valCopy + if err := next(patchedJson); err != nil { + return err + } + } + + return nil + } + ops = append(ops, f) + } + } + return ops +} + +func (o *GridOptimizer) Run(executor Executor, configJson []byte) (map[string][]Metric, error) { + o.CurrentParams = make([]interface{}, len(o.Config.Matrix)) + + var valueFunctions = map[string]MetricValueFunc{ + "totalProfit": TotalProfitMetricValueFunc, + } + var metrics = map[string][]Metric{} + + var ops = o.buildOps() + + var taskC = make(chan BacktestTask, 100) + + var app = func(configJson []byte, next func(configJson []byte) error) error { + var labels = copyLabels(o.ParamLabels) + var params = copyParams(o.CurrentParams) + taskC <- BacktestTask{ + ConfigJson: configJson, + Params: params, + Labels: labels, + } + return nil + } + + log.Debugf("build %d ops", len(ops)) + + var wrapper = func(configJson []byte) error { + return app(configJson, nil) + } + + for i := len(ops) - 1; i >= 0; i-- { + cur := ops[i] + inner := wrapper + wrapper = func(configJson []byte) error { + return cur(configJson, inner) + } + } + + ctx := context.Background() + resultsC, err := executor.Run(ctx, taskC) + if err != nil { + return nil, err + } + + if err := wrapper(configJson); err != nil { + return nil, err + } + close(taskC) // this will shut down the executor + + for result := range resultsC { + for metricName, metricFunc := range valueFunctions { + var metricValue = metricFunc(result.Report) + log.Infof("params: %+v => %s %+v", result.Params, metricName, metricValue) + metrics[metricName] = append(metrics[metricName], Metric{ + Params: result.Params, + Labels: result.Labels, + Value: metricValue, + }) + } + } + + for n := range metrics { + sort.Slice(metrics[n], func(i, j int) bool { + a := metrics[n][i].Value + b := metrics[n][j].Value + return a.Compare(b) > 0 + }) + } + + return metrics, err +} + +func reformatJson(text string) string { + var a interface{} + var err = json.Unmarshal([]byte(text), &a) + if err != nil { + return "{invalid json}" + } + + out, _ := json.MarshalIndent(a, "", " ") + return string(out) +} diff --git a/pkg/optimizer/local.go b/pkg/optimizer/local.go new file mode 100644 index 0000000000..393f9703fa --- /dev/null +++ b/pkg/optimizer/local.go @@ -0,0 +1,168 @@ +package optimizer + +import ( + "context" + "encoding/json" + "os" + "os/exec" + "strings" + "sync" + + "github.com/sirupsen/logrus" + "gopkg.in/yaml.v3" + + "github.com/c9s/bbgo/pkg/backtest" +) + +var log = logrus.WithField("component", "optimizer") + +type BacktestTask struct { + ConfigJson []byte + Params []interface{} + Labels []string + Report *backtest.SummaryReport + Error error +} + +type Executor interface { + // Execute(configJson []byte) (*backtest.SummaryReport, error) + Run(ctx context.Context, taskC chan BacktestTask) (chan BacktestTask, error) +} + +type AsyncHandle struct { + Error error + Report *backtest.SummaryReport + Done chan struct{} +} + +type LocalProcessExecutor struct { + Config *LocalExecutorConfig + Bin string + WorkDir string + ConfigDir string + OutputDir string +} + +func (e *LocalProcessExecutor) ExecuteAsync(configJson []byte) *AsyncHandle { + handle := &AsyncHandle{ + Done: make(chan struct{}), + } + + go func() { + defer close(handle.Done) + report, err := e.execute(configJson) + handle.Error = err + 
handle.Report = report + }() + + return handle +} + +func (e *LocalProcessExecutor) readReport(output []byte) (*backtest.SummaryReport, error) { + summaryReportFilepath := strings.TrimSpace(string(output)) + _, err := os.Stat(summaryReportFilepath) + if os.IsNotExist(err) { + return nil, err + } + + summaryReport, err := backtest.ReadSummaryReport(summaryReportFilepath) + if err != nil { + return nil, err + } + + return summaryReport, nil +} + +func (e *LocalProcessExecutor) Run(ctx context.Context, taskC chan BacktestTask) (chan BacktestTask, error) { + var maxNumOfProcess = e.Config.MaxNumberOfProcesses + var resultsC = make(chan BacktestTask, maxNumOfProcess*2) + + wg := sync.WaitGroup{} + wg.Add(maxNumOfProcess) + + go func() { + wg.Wait() + close(resultsC) + }() + + for i := 0; i < maxNumOfProcess; i++ { + // fork workers + go func(id int, taskC chan BacktestTask) { + taskCnt := 0 + log.Infof("starting local worker #%d", id) + defer wg.Done() + for { + select { + case <-ctx.Done(): + return + + case task, ok := <-taskC: + if !ok { + return + } + + taskCnt++ + log.Infof("local worker #%d received param task: %v", id, task.Params) + + report, err := e.execute(task.ConfigJson) + if err != nil { + log.WithError(err).Errorf("execute error") + } + + task.Error = err + task.Report = report + + resultsC <- task + } + } + }(i+1, taskC) + } + + return resultsC, nil +} + +// execute runs the config json and returns the summary report +// this is a blocking operation +func (e *LocalProcessExecutor) execute(configJson []byte) (*backtest.SummaryReport, error) { + tf, err := jsonToYamlConfig(e.ConfigDir, configJson) + if err != nil { + return nil, err + } + + c := exec.Command(e.Bin, "backtest", "--config", tf.Name(), "--output", e.OutputDir, "--subdir") + output, err := c.Output() + if err != nil { + return nil, err + } + + return e.readReport(output) +} + +// jsonToYamlConfig translate json format config into a YAML format config file +// The generated file is a temp file +func jsonToYamlConfig(dir string, configJson []byte) (*os.File, error) { + var o map[string]interface{} + if err := json.Unmarshal(configJson, &o); err != nil { + return nil, err + } + + yamlConfig, err := yaml.Marshal(o) + if err != nil { + return nil, err + } + + tf, err := os.CreateTemp(dir, "bbgo-*.yaml") + if err != nil { + return nil, err + } + + if _, err = tf.Write(yamlConfig); err != nil { + return nil, err + } + + if err := tf.Close(); err != nil { + return nil, err + } + + return tf, nil +} diff --git a/pkg/optimizer/local_test.go b/pkg/optimizer/local_test.go new file mode 100644 index 0000000000..1c0298fb71 --- /dev/null +++ b/pkg/optimizer/local_test.go @@ -0,0 +1,21 @@ +package optimizer + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_jsonToYamlConfig(t *testing.T) { + err := os.Mkdir(".tmpconfig", 0755) + assert.NoError(t, err) + + tf, err := jsonToYamlConfig(".tmpconfig", []byte(`{ + }`)) + assert.NoError(t, err) + assert.NotNil(t, tf) + assert.NotEmpty(t, tf.Name()) + + _ = os.RemoveAll(".tmpconfig") +} diff --git a/pkg/optimizer/operator.go b/pkg/optimizer/operator.go new file mode 100644 index 0000000000..c4ac89cf49 --- /dev/null +++ b/pkg/optimizer/operator.go @@ -0,0 +1,3 @@ +package optimizer + +type OpFunc func(configJson []byte, next func(configJson []byte) error) error diff --git a/pkg/pb/README.md b/pkg/pb/README.md new file mode 100644 index 0000000000..f9cf4074bd --- /dev/null +++ b/pkg/pb/README.md @@ -0,0 +1,9 @@ +# Protocol Buffers + +## Generate code + 
+```sh +go install google.golang.org/protobuf/cmd/protoc-gen-go@latest +cd /pkg/protobuf +protoc -I=. --go_out=. bbgo.proto +``` diff --git a/pkg/pb/bbgo.pb.go b/pkg/pb/bbgo.pb.go new file mode 100644 index 0000000000..cb3cf0e106 --- /dev/null +++ b/pkg/pb/bbgo.pb.go @@ -0,0 +1,3208 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v3.19.3 +// source: pkg/pb/bbgo.proto + +package pb + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Event int32 + +const ( + Event_UNKNOWN Event = 0 + Event_SUBSCRIBED Event = 1 + Event_UNSUBSCRIBED Event = 2 + Event_SNAPSHOT Event = 3 + Event_UPDATE Event = 4 + Event_AUTHENTICATED Event = 5 + Event_ERROR Event = 99 +) + +// Enum value maps for Event. +var ( + Event_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SUBSCRIBED", + 2: "UNSUBSCRIBED", + 3: "SNAPSHOT", + 4: "UPDATE", + 5: "AUTHENTICATED", + 99: "ERROR", + } + Event_value = map[string]int32{ + "UNKNOWN": 0, + "SUBSCRIBED": 1, + "UNSUBSCRIBED": 2, + "SNAPSHOT": 3, + "UPDATE": 4, + "AUTHENTICATED": 5, + "ERROR": 99, + } +) + +func (x Event) Enum() *Event { + p := new(Event) + *p = x + return p +} + +func (x Event) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Event) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_pb_bbgo_proto_enumTypes[0].Descriptor() +} + +func (Event) Type() protoreflect.EnumType { + return &file_pkg_pb_bbgo_proto_enumTypes[0] +} + +func (x Event) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Event.Descriptor instead. +func (Event) EnumDescriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{0} +} + +type Channel int32 + +const ( + Channel_BOOK Channel = 0 + Channel_TRADE Channel = 1 + Channel_TICKER Channel = 2 + Channel_KLINE Channel = 3 + Channel_BALANCE Channel = 4 + Channel_ORDER Channel = 5 +) + +// Enum value maps for Channel. +var ( + Channel_name = map[int32]string{ + 0: "BOOK", + 1: "TRADE", + 2: "TICKER", + 3: "KLINE", + 4: "BALANCE", + 5: "ORDER", + } + Channel_value = map[string]int32{ + "BOOK": 0, + "TRADE": 1, + "TICKER": 2, + "KLINE": 3, + "BALANCE": 4, + "ORDER": 5, + } +) + +func (x Channel) Enum() *Channel { + p := new(Channel) + *p = x + return p +} + +func (x Channel) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Channel) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_pb_bbgo_proto_enumTypes[1].Descriptor() +} + +func (Channel) Type() protoreflect.EnumType { + return &file_pkg_pb_bbgo_proto_enumTypes[1] +} + +func (x Channel) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Channel.Descriptor instead. +func (Channel) EnumDescriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{1} +} + +type Side int32 + +const ( + Side_BUY Side = 0 + Side_SELL Side = 1 +) + +// Enum value maps for Side. 
+var ( + Side_name = map[int32]string{ + 0: "BUY", + 1: "SELL", + } + Side_value = map[string]int32{ + "BUY": 0, + "SELL": 1, + } +) + +func (x Side) Enum() *Side { + p := new(Side) + *p = x + return p +} + +func (x Side) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Side) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_pb_bbgo_proto_enumTypes[2].Descriptor() +} + +func (Side) Type() protoreflect.EnumType { + return &file_pkg_pb_bbgo_proto_enumTypes[2] +} + +func (x Side) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Side.Descriptor instead. +func (Side) EnumDescriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{2} +} + +type OrderType int32 + +const ( + OrderType_MARKET OrderType = 0 + OrderType_LIMIT OrderType = 1 + OrderType_STOP_MARKET OrderType = 2 + OrderType_STOP_LIMIT OrderType = 3 + OrderType_POST_ONLY OrderType = 4 + OrderType_IOC_LIMIT OrderType = 5 +) + +// Enum value maps for OrderType. +var ( + OrderType_name = map[int32]string{ + 0: "MARKET", + 1: "LIMIT", + 2: "STOP_MARKET", + 3: "STOP_LIMIT", + 4: "POST_ONLY", + 5: "IOC_LIMIT", + } + OrderType_value = map[string]int32{ + "MARKET": 0, + "LIMIT": 1, + "STOP_MARKET": 2, + "STOP_LIMIT": 3, + "POST_ONLY": 4, + "IOC_LIMIT": 5, + } +) + +func (x OrderType) Enum() *OrderType { + p := new(OrderType) + *p = x + return p +} + +func (x OrderType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (OrderType) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_pb_bbgo_proto_enumTypes[3].Descriptor() +} + +func (OrderType) Type() protoreflect.EnumType { + return &file_pkg_pb_bbgo_proto_enumTypes[3] +} + +func (x OrderType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use OrderType.Descriptor instead. +func (OrderType) EnumDescriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{3} +} + +type Empty struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *Empty) Reset() { + *x = Empty{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Empty) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Empty) ProtoMessage() {} + +func (x *Empty) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Empty.ProtoReflect.Descriptor instead. 
+func (*Empty) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{0} +} + +type Error struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ErrorCode int64 `protobuf:"varint,1,opt,name=error_code,json=errorCode,proto3" json:"error_code,omitempty"` + ErrorMessage string `protobuf:"bytes,2,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` +} + +func (x *Error) Reset() { + *x = Error{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Error) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Error) ProtoMessage() {} + +func (x *Error) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Error.ProtoReflect.Descriptor instead. +func (*Error) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{1} +} + +func (x *Error) GetErrorCode() int64 { + if x != nil { + return x.ErrorCode + } + return 0 +} + +func (x *Error) GetErrorMessage() string { + if x != nil { + return x.ErrorMessage + } + return "" +} + +type UserDataRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` +} + +func (x *UserDataRequest) Reset() { + *x = UserDataRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UserDataRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UserDataRequest) ProtoMessage() {} + +func (x *UserDataRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UserDataRequest.ProtoReflect.Descriptor instead. +func (*UserDataRequest) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{2} +} + +func (x *UserDataRequest) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +type UserData struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Exchange string `protobuf:"bytes,2,opt,name=exchange,proto3" json:"exchange,omitempty"` + Channel Channel `protobuf:"varint,3,opt,name=channel,proto3,enum=bbgo.Channel" json:"channel,omitempty"` // trade, order, balance + Event Event `protobuf:"varint,4,opt,name=event,proto3,enum=bbgo.Event" json:"event,omitempty"` // snapshot, update ... 
+ Balances []*Balance `protobuf:"bytes,5,rep,name=balances,proto3" json:"balances,omitempty"` + Trades []*Trade `protobuf:"bytes,6,rep,name=trades,proto3" json:"trades,omitempty"` + Orders []*Order `protobuf:"bytes,7,rep,name=orders,proto3" json:"orders,omitempty"` +} + +func (x *UserData) Reset() { + *x = UserData{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UserData) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UserData) ProtoMessage() {} + +func (x *UserData) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UserData.ProtoReflect.Descriptor instead. +func (*UserData) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{3} +} + +func (x *UserData) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *UserData) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *UserData) GetChannel() Channel { + if x != nil { + return x.Channel + } + return Channel_BOOK +} + +func (x *UserData) GetEvent() Event { + if x != nil { + return x.Event + } + return Event_UNKNOWN +} + +func (x *UserData) GetBalances() []*Balance { + if x != nil { + return x.Balances + } + return nil +} + +func (x *UserData) GetTrades() []*Trade { + if x != nil { + return x.Trades + } + return nil +} + +func (x *UserData) GetOrders() []*Order { + if x != nil { + return x.Orders + } + return nil +} + +type SubscribeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Subscriptions []*Subscription `protobuf:"bytes,1,rep,name=subscriptions,proto3" json:"subscriptions,omitempty"` +} + +func (x *SubscribeRequest) Reset() { + *x = SubscribeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SubscribeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SubscribeRequest) ProtoMessage() {} + +func (x *SubscribeRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SubscribeRequest.ProtoReflect.Descriptor instead. 
+func (*SubscribeRequest) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{4} +} + +func (x *SubscribeRequest) GetSubscriptions() []*Subscription { + if x != nil { + return x.Subscriptions + } + return nil +} + +type Subscription struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Exchange string `protobuf:"bytes,1,opt,name=exchange,proto3" json:"exchange,omitempty"` + Channel Channel `protobuf:"varint,2,opt,name=channel,proto3,enum=bbgo.Channel" json:"channel,omitempty"` // book, trade, ticker + Symbol string `protobuf:"bytes,3,opt,name=symbol,proto3" json:"symbol,omitempty"` + Depth string `protobuf:"bytes,4,opt,name=depth,proto3" json:"depth,omitempty"` // depth is for book, valid values are full, medium, 1, 5 and 20 + Interval string `protobuf:"bytes,5,opt,name=interval,proto3" json:"interval,omitempty"` // interval is for kline channel +} + +func (x *Subscription) Reset() { + *x = Subscription{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Subscription) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Subscription) ProtoMessage() {} + +func (x *Subscription) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Subscription.ProtoReflect.Descriptor instead. +func (*Subscription) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{5} +} + +func (x *Subscription) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *Subscription) GetChannel() Channel { + if x != nil { + return x.Channel + } + return Channel_BOOK +} + +func (x *Subscription) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *Subscription) GetDepth() string { + if x != nil { + return x.Depth + } + return "" +} + +func (x *Subscription) GetInterval() string { + if x != nil { + return x.Interval + } + return "" +} + +type MarketData struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Exchange string `protobuf:"bytes,2,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,3,opt,name=symbol,proto3" json:"symbol,omitempty"` + Channel Channel `protobuf:"varint,4,opt,name=channel,proto3,enum=bbgo.Channel" json:"channel,omitempty"` // book, trade, ticker, user + Event Event `protobuf:"varint,5,opt,name=event,proto3,enum=bbgo.Event" json:"event,omitempty"` // snapshot or update + Depth *Depth `protobuf:"bytes,6,opt,name=depth,proto3" json:"depth,omitempty"` // depth: used by book + Kline *KLine `protobuf:"bytes,7,opt,name=kline,proto3" json:"kline,omitempty"` + Ticker *Ticker `protobuf:"bytes,9,opt,name=ticker,proto3" json:"ticker,omitempty"` // market ticker + Trades []*Trade `protobuf:"bytes,8,rep,name=trades,proto3" json:"trades,omitempty"` // market trades + SubscribedAt int64 `protobuf:"varint,12,opt,name=subscribed_at,json=subscribedAt,proto3" json:"subscribed_at,omitempty"` + Error *Error `protobuf:"bytes,13,opt,name=error,proto3" 
json:"error,omitempty"` +} + +func (x *MarketData) Reset() { + *x = MarketData{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MarketData) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MarketData) ProtoMessage() {} + +func (x *MarketData) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MarketData.ProtoReflect.Descriptor instead. +func (*MarketData) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{6} +} + +func (x *MarketData) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *MarketData) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *MarketData) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *MarketData) GetChannel() Channel { + if x != nil { + return x.Channel + } + return Channel_BOOK +} + +func (x *MarketData) GetEvent() Event { + if x != nil { + return x.Event + } + return Event_UNKNOWN +} + +func (x *MarketData) GetDepth() *Depth { + if x != nil { + return x.Depth + } + return nil +} + +func (x *MarketData) GetKline() *KLine { + if x != nil { + return x.Kline + } + return nil +} + +func (x *MarketData) GetTicker() *Ticker { + if x != nil { + return x.Ticker + } + return nil +} + +func (x *MarketData) GetTrades() []*Trade { + if x != nil { + return x.Trades + } + return nil +} + +func (x *MarketData) GetSubscribedAt() int64 { + if x != nil { + return x.SubscribedAt + } + return 0 +} + +func (x *MarketData) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + +type Depth struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Exchange string `protobuf:"bytes,1,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,2,opt,name=symbol,proto3" json:"symbol,omitempty"` + Asks []*PriceVolume `protobuf:"bytes,3,rep,name=asks,proto3" json:"asks,omitempty"` + Bids []*PriceVolume `protobuf:"bytes,4,rep,name=bids,proto3" json:"bids,omitempty"` +} + +func (x *Depth) Reset() { + *x = Depth{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Depth) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Depth) ProtoMessage() {} + +func (x *Depth) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Depth.ProtoReflect.Descriptor instead. 
+func (*Depth) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{7} +} + +func (x *Depth) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *Depth) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *Depth) GetAsks() []*PriceVolume { + if x != nil { + return x.Asks + } + return nil +} + +func (x *Depth) GetBids() []*PriceVolume { + if x != nil { + return x.Bids + } + return nil +} + +type PriceVolume struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Price string `protobuf:"bytes,1,opt,name=price,proto3" json:"price,omitempty"` + Volume string `protobuf:"bytes,2,opt,name=volume,proto3" json:"volume,omitempty"` +} + +func (x *PriceVolume) Reset() { + *x = PriceVolume{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PriceVolume) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PriceVolume) ProtoMessage() {} + +func (x *PriceVolume) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PriceVolume.ProtoReflect.Descriptor instead. +func (*PriceVolume) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{8} +} + +func (x *PriceVolume) GetPrice() string { + if x != nil { + return x.Price + } + return "" +} + +func (x *PriceVolume) GetVolume() string { + if x != nil { + return x.Volume + } + return "" +} + +// https://maicoin.github.io/max-websocket-docs/#/private_channels?id=trade-response +// https://maicoin.github.io/max-websocket-docs/#/public_trade?id=success-response +type Trade struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Exchange string `protobuf:"bytes,2,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,3,opt,name=symbol,proto3" json:"symbol,omitempty"` + Id string `protobuf:"bytes,4,opt,name=id,proto3" json:"id,omitempty"` + Price string `protobuf:"bytes,5,opt,name=price,proto3" json:"price,omitempty"` + Quantity string `protobuf:"bytes,6,opt,name=quantity,proto3" json:"quantity,omitempty"` + CreatedAt int64 `protobuf:"varint,7,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + Side Side `protobuf:"varint,8,opt,name=side,proto3,enum=bbgo.Side" json:"side,omitempty"` + FeeCurrency string `protobuf:"bytes,9,opt,name=fee_currency,json=feeCurrency,proto3" json:"fee_currency,omitempty"` + Fee string `protobuf:"bytes,10,opt,name=fee,proto3" json:"fee,omitempty"` + Maker bool `protobuf:"varint,11,opt,name=maker,proto3" json:"maker,omitempty"` +} + +func (x *Trade) Reset() { + *x = Trade{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Trade) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Trade) ProtoMessage() {} + +func (x *Trade) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[9] + if 
protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Trade.ProtoReflect.Descriptor instead. +func (*Trade) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{9} +} + +func (x *Trade) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *Trade) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *Trade) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *Trade) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *Trade) GetPrice() string { + if x != nil { + return x.Price + } + return "" +} + +func (x *Trade) GetQuantity() string { + if x != nil { + return x.Quantity + } + return "" +} + +func (x *Trade) GetCreatedAt() int64 { + if x != nil { + return x.CreatedAt + } + return 0 +} + +func (x *Trade) GetSide() Side { + if x != nil { + return x.Side + } + return Side_BUY +} + +func (x *Trade) GetFeeCurrency() string { + if x != nil { + return x.FeeCurrency + } + return "" +} + +func (x *Trade) GetFee() string { + if x != nil { + return x.Fee + } + return "" +} + +func (x *Trade) GetMaker() bool { + if x != nil { + return x.Maker + } + return false +} + +// https://maicoin.github.io/max-websocket-docs/#/public_ticker?id=success-response +type Ticker struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Exchange string `protobuf:"bytes,1,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,2,opt,name=symbol,proto3" json:"symbol,omitempty"` + Open float64 `protobuf:"fixed64,3,opt,name=open,proto3" json:"open,omitempty"` + High float64 `protobuf:"fixed64,4,opt,name=high,proto3" json:"high,omitempty"` + Low float64 `protobuf:"fixed64,5,opt,name=low,proto3" json:"low,omitempty"` + Close float64 `protobuf:"fixed64,6,opt,name=close,proto3" json:"close,omitempty"` + Volume float64 `protobuf:"fixed64,7,opt,name=volume,proto3" json:"volume,omitempty"` +} + +func (x *Ticker) Reset() { + *x = Ticker{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Ticker) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Ticker) ProtoMessage() {} + +func (x *Ticker) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Ticker.ProtoReflect.Descriptor instead. 
+func (*Ticker) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{10} +} + +func (x *Ticker) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *Ticker) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *Ticker) GetOpen() float64 { + if x != nil { + return x.Open + } + return 0 +} + +func (x *Ticker) GetHigh() float64 { + if x != nil { + return x.High + } + return 0 +} + +func (x *Ticker) GetLow() float64 { + if x != nil { + return x.Low + } + return 0 +} + +func (x *Ticker) GetClose() float64 { + if x != nil { + return x.Close + } + return 0 +} + +func (x *Ticker) GetVolume() float64 { + if x != nil { + return x.Volume + } + return 0 +} + +// https://maicoin.github.io/max-websocket-docs/#/private_channels?id=snapshot +type Order struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Exchange string `protobuf:"bytes,1,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,2,opt,name=symbol,proto3" json:"symbol,omitempty"` + Id string `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + Side Side `protobuf:"varint,4,opt,name=side,proto3,enum=bbgo.Side" json:"side,omitempty"` + OrderType OrderType `protobuf:"varint,5,opt,name=order_type,json=orderType,proto3,enum=bbgo.OrderType" json:"order_type,omitempty"` + Price string `protobuf:"bytes,6,opt,name=price,proto3" json:"price,omitempty"` + StopPrice string `protobuf:"bytes,7,opt,name=stop_price,json=stopPrice,proto3" json:"stop_price,omitempty"` + Status string `protobuf:"bytes,9,opt,name=status,proto3" json:"status,omitempty"` + Quantity string `protobuf:"bytes,11,opt,name=quantity,proto3" json:"quantity,omitempty"` + ExecutedQuantity string `protobuf:"bytes,12,opt,name=executed_quantity,json=executedQuantity,proto3" json:"executed_quantity,omitempty"` + ClientOrderId string `protobuf:"bytes,14,opt,name=client_order_id,json=clientOrderId,proto3" json:"client_order_id,omitempty"` + GroupId int64 `protobuf:"varint,15,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` + CreatedAt int64 `protobuf:"varint,10,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` +} + +func (x *Order) Reset() { + *x = Order{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Order) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Order) ProtoMessage() {} + +func (x *Order) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Order.ProtoReflect.Descriptor instead. 
+func (*Order) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{11} +} + +func (x *Order) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *Order) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *Order) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *Order) GetSide() Side { + if x != nil { + return x.Side + } + return Side_BUY +} + +func (x *Order) GetOrderType() OrderType { + if x != nil { + return x.OrderType + } + return OrderType_MARKET +} + +func (x *Order) GetPrice() string { + if x != nil { + return x.Price + } + return "" +} + +func (x *Order) GetStopPrice() string { + if x != nil { + return x.StopPrice + } + return "" +} + +func (x *Order) GetStatus() string { + if x != nil { + return x.Status + } + return "" +} + +func (x *Order) GetQuantity() string { + if x != nil { + return x.Quantity + } + return "" +} + +func (x *Order) GetExecutedQuantity() string { + if x != nil { + return x.ExecutedQuantity + } + return "" +} + +func (x *Order) GetClientOrderId() string { + if x != nil { + return x.ClientOrderId + } + return "" +} + +func (x *Order) GetGroupId() int64 { + if x != nil { + return x.GroupId + } + return 0 +} + +func (x *Order) GetCreatedAt() int64 { + if x != nil { + return x.CreatedAt + } + return 0 +} + +type SubmitOrder struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Exchange string `protobuf:"bytes,2,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,3,opt,name=symbol,proto3" json:"symbol,omitempty"` + Side Side `protobuf:"varint,4,opt,name=side,proto3,enum=bbgo.Side" json:"side,omitempty"` + Price string `protobuf:"bytes,6,opt,name=price,proto3" json:"price,omitempty"` + Quantity string `protobuf:"bytes,5,opt,name=quantity,proto3" json:"quantity,omitempty"` + StopPrice string `protobuf:"bytes,7,opt,name=stop_price,json=stopPrice,proto3" json:"stop_price,omitempty"` + OrderType OrderType `protobuf:"varint,8,opt,name=order_type,json=orderType,proto3,enum=bbgo.OrderType" json:"order_type,omitempty"` + ClientOrderId string `protobuf:"bytes,9,opt,name=client_order_id,json=clientOrderId,proto3" json:"client_order_id,omitempty"` + GroupId int64 `protobuf:"varint,10,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` +} + +func (x *SubmitOrder) Reset() { + *x = SubmitOrder{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SubmitOrder) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SubmitOrder) ProtoMessage() {} + +func (x *SubmitOrder) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SubmitOrder.ProtoReflect.Descriptor instead. 
+func (*SubmitOrder) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{12} +} + +func (x *SubmitOrder) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *SubmitOrder) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *SubmitOrder) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *SubmitOrder) GetSide() Side { + if x != nil { + return x.Side + } + return Side_BUY +} + +func (x *SubmitOrder) GetPrice() string { + if x != nil { + return x.Price + } + return "" +} + +func (x *SubmitOrder) GetQuantity() string { + if x != nil { + return x.Quantity + } + return "" +} + +func (x *SubmitOrder) GetStopPrice() string { + if x != nil { + return x.StopPrice + } + return "" +} + +func (x *SubmitOrder) GetOrderType() OrderType { + if x != nil { + return x.OrderType + } + return OrderType_MARKET +} + +func (x *SubmitOrder) GetClientOrderId() string { + if x != nil { + return x.ClientOrderId + } + return "" +} + +func (x *SubmitOrder) GetGroupId() int64 { + if x != nil { + return x.GroupId + } + return 0 +} + +// https://maicoin.github.io/max-websocket-docs/#/private_channels?id=account-response +type Balance struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Exchange string `protobuf:"bytes,2,opt,name=exchange,proto3" json:"exchange,omitempty"` + Currency string `protobuf:"bytes,3,opt,name=currency,proto3" json:"currency,omitempty"` + Available string `protobuf:"bytes,4,opt,name=available,proto3" json:"available,omitempty"` + Locked string `protobuf:"bytes,5,opt,name=locked,proto3" json:"locked,omitempty"` + Borrowed string `protobuf:"bytes,6,opt,name=borrowed,proto3" json:"borrowed,omitempty"` +} + +func (x *Balance) Reset() { + *x = Balance{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Balance) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Balance) ProtoMessage() {} + +func (x *Balance) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Balance.ProtoReflect.Descriptor instead. 
+func (*Balance) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{13} +} + +func (x *Balance) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *Balance) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *Balance) GetCurrency() string { + if x != nil { + return x.Currency + } + return "" +} + +func (x *Balance) GetAvailable() string { + if x != nil { + return x.Available + } + return "" +} + +func (x *Balance) GetLocked() string { + if x != nil { + return x.Locked + } + return "" +} + +func (x *Balance) GetBorrowed() string { + if x != nil { + return x.Borrowed + } + return "" +} + +type SubmitOrderRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + SubmitOrders []*SubmitOrder `protobuf:"bytes,2,rep,name=submit_orders,json=submitOrders,proto3" json:"submit_orders,omitempty"` +} + +func (x *SubmitOrderRequest) Reset() { + *x = SubmitOrderRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SubmitOrderRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SubmitOrderRequest) ProtoMessage() {} + +func (x *SubmitOrderRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SubmitOrderRequest.ProtoReflect.Descriptor instead. +func (*SubmitOrderRequest) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{14} +} + +func (x *SubmitOrderRequest) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *SubmitOrderRequest) GetSubmitOrders() []*SubmitOrder { + if x != nil { + return x.SubmitOrders + } + return nil +} + +type SubmitOrderResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Orders []*Order `protobuf:"bytes,2,rep,name=orders,proto3" json:"orders,omitempty"` + Error *Error `protobuf:"bytes,3,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *SubmitOrderResponse) Reset() { + *x = SubmitOrderResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SubmitOrderResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SubmitOrderResponse) ProtoMessage() {} + +func (x *SubmitOrderResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SubmitOrderResponse.ProtoReflect.Descriptor instead. 
+func (*SubmitOrderResponse) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{15} +} + +func (x *SubmitOrderResponse) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *SubmitOrderResponse) GetOrders() []*Order { + if x != nil { + return x.Orders + } + return nil +} + +func (x *SubmitOrderResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + +type CancelOrderRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + OrderId string `protobuf:"bytes,2,opt,name=order_id,json=orderId,proto3" json:"order_id,omitempty"` + ClientOrderId string `protobuf:"bytes,3,opt,name=client_order_id,json=clientOrderId,proto3" json:"client_order_id,omitempty"` +} + +func (x *CancelOrderRequest) Reset() { + *x = CancelOrderRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CancelOrderRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CancelOrderRequest) ProtoMessage() {} + +func (x *CancelOrderRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CancelOrderRequest.ProtoReflect.Descriptor instead. +func (*CancelOrderRequest) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{16} +} + +func (x *CancelOrderRequest) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *CancelOrderRequest) GetOrderId() string { + if x != nil { + return x.OrderId + } + return "" +} + +func (x *CancelOrderRequest) GetClientOrderId() string { + if x != nil { + return x.ClientOrderId + } + return "" +} + +type CancelOrderResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Order *Order `protobuf:"bytes,1,opt,name=order,proto3" json:"order,omitempty"` + Error *Error `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *CancelOrderResponse) Reset() { + *x = CancelOrderResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CancelOrderResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CancelOrderResponse) ProtoMessage() {} + +func (x *CancelOrderResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CancelOrderResponse.ProtoReflect.Descriptor instead. 
+func (*CancelOrderResponse) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{17} +} + +func (x *CancelOrderResponse) GetOrder() *Order { + if x != nil { + return x.Order + } + return nil +} + +func (x *CancelOrderResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + +type QueryOrderRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + ClientOrderId string `protobuf:"bytes,3,opt,name=client_order_id,json=clientOrderId,proto3" json:"client_order_id,omitempty"` +} + +func (x *QueryOrderRequest) Reset() { + *x = QueryOrderRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryOrderRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryOrderRequest) ProtoMessage() {} + +func (x *QueryOrderRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryOrderRequest.ProtoReflect.Descriptor instead. +func (*QueryOrderRequest) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{18} +} + +func (x *QueryOrderRequest) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *QueryOrderRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *QueryOrderRequest) GetClientOrderId() string { + if x != nil { + return x.ClientOrderId + } + return "" +} + +type QueryOrderResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Order *Order `protobuf:"bytes,1,opt,name=order,proto3" json:"order,omitempty"` + Error *Error `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *QueryOrderResponse) Reset() { + *x = QueryOrderResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryOrderResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryOrderResponse) ProtoMessage() {} + +func (x *QueryOrderResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryOrderResponse.ProtoReflect.Descriptor instead. 
+func (*QueryOrderResponse) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{19} +} + +func (x *QueryOrderResponse) GetOrder() *Order { + if x != nil { + return x.Order + } + return nil +} + +func (x *QueryOrderResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + +type QueryOrdersRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Symbol string `protobuf:"bytes,2,opt,name=symbol,proto3" json:"symbol,omitempty"` + State []string `protobuf:"bytes,3,rep,name=state,proto3" json:"state,omitempty"` + OrderBy string `protobuf:"bytes,4,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + GroupId int64 `protobuf:"varint,5,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` + Pagination bool `protobuf:"varint,6,opt,name=pagination,proto3" json:"pagination,omitempty"` + Page int64 `protobuf:"varint,7,opt,name=page,proto3" json:"page,omitempty"` + Limit int64 `protobuf:"varint,8,opt,name=limit,proto3" json:"limit,omitempty"` + Offset int64 `protobuf:"varint,9,opt,name=offset,proto3" json:"offset,omitempty"` +} + +func (x *QueryOrdersRequest) Reset() { + *x = QueryOrdersRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryOrdersRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryOrdersRequest) ProtoMessage() {} + +func (x *QueryOrdersRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryOrdersRequest.ProtoReflect.Descriptor instead. 
+func (*QueryOrdersRequest) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{20} +} + +func (x *QueryOrdersRequest) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *QueryOrdersRequest) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *QueryOrdersRequest) GetState() []string { + if x != nil { + return x.State + } + return nil +} + +func (x *QueryOrdersRequest) GetOrderBy() string { + if x != nil { + return x.OrderBy + } + return "" +} + +func (x *QueryOrdersRequest) GetGroupId() int64 { + if x != nil { + return x.GroupId + } + return 0 +} + +func (x *QueryOrdersRequest) GetPagination() bool { + if x != nil { + return x.Pagination + } + return false +} + +func (x *QueryOrdersRequest) GetPage() int64 { + if x != nil { + return x.Page + } + return 0 +} + +func (x *QueryOrdersRequest) GetLimit() int64 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *QueryOrdersRequest) GetOffset() int64 { + if x != nil { + return x.Offset + } + return 0 +} + +type QueryOrdersResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Orders []*Order `protobuf:"bytes,1,rep,name=orders,proto3" json:"orders,omitempty"` + Error *Error `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *QueryOrdersResponse) Reset() { + *x = QueryOrdersResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryOrdersResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryOrdersResponse) ProtoMessage() {} + +func (x *QueryOrdersResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryOrdersResponse.ProtoReflect.Descriptor instead. 
+func (*QueryOrdersResponse) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{21} +} + +func (x *QueryOrdersResponse) GetOrders() []*Order { + if x != nil { + return x.Orders + } + return nil +} + +func (x *QueryOrdersResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + +type QueryTradesRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Exchange string `protobuf:"bytes,1,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,2,opt,name=symbol,proto3" json:"symbol,omitempty"` + Timestamp int64 `protobuf:"varint,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + From int64 `protobuf:"varint,4,opt,name=from,proto3" json:"from,omitempty"` + To int64 `protobuf:"varint,5,opt,name=to,proto3" json:"to,omitempty"` + OrderBy string `protobuf:"bytes,6,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + Pagination bool `protobuf:"varint,7,opt,name=pagination,proto3" json:"pagination,omitempty"` + Page int64 `protobuf:"varint,8,opt,name=page,proto3" json:"page,omitempty"` + Limit int64 `protobuf:"varint,9,opt,name=limit,proto3" json:"limit,omitempty"` + Offset int64 `protobuf:"varint,10,opt,name=offset,proto3" json:"offset,omitempty"` +} + +func (x *QueryTradesRequest) Reset() { + *x = QueryTradesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryTradesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryTradesRequest) ProtoMessage() {} + +func (x *QueryTradesRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryTradesRequest.ProtoReflect.Descriptor instead. 
+func (*QueryTradesRequest) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{22} +} + +func (x *QueryTradesRequest) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *QueryTradesRequest) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *QueryTradesRequest) GetTimestamp() int64 { + if x != nil { + return x.Timestamp + } + return 0 +} + +func (x *QueryTradesRequest) GetFrom() int64 { + if x != nil { + return x.From + } + return 0 +} + +func (x *QueryTradesRequest) GetTo() int64 { + if x != nil { + return x.To + } + return 0 +} + +func (x *QueryTradesRequest) GetOrderBy() string { + if x != nil { + return x.OrderBy + } + return "" +} + +func (x *QueryTradesRequest) GetPagination() bool { + if x != nil { + return x.Pagination + } + return false +} + +func (x *QueryTradesRequest) GetPage() int64 { + if x != nil { + return x.Page + } + return 0 +} + +func (x *QueryTradesRequest) GetLimit() int64 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *QueryTradesRequest) GetOffset() int64 { + if x != nil { + return x.Offset + } + return 0 +} + +type QueryTradesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Trades []*Trade `protobuf:"bytes,1,rep,name=trades,proto3" json:"trades,omitempty"` + Error *Error `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *QueryTradesResponse) Reset() { + *x = QueryTradesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryTradesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryTradesResponse) ProtoMessage() {} + +func (x *QueryTradesResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryTradesResponse.ProtoReflect.Descriptor instead. 
+func (*QueryTradesResponse) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{23} +} + +func (x *QueryTradesResponse) GetTrades() []*Trade { + if x != nil { + return x.Trades + } + return nil +} + +func (x *QueryTradesResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + +type QueryKLinesRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Exchange string `protobuf:"bytes,1,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,2,opt,name=symbol,proto3" json:"symbol,omitempty"` + Interval string `protobuf:"bytes,3,opt,name=interval,proto3" json:"interval,omitempty"` // time period of K line in minute + StartTime int64 `protobuf:"varint,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + EndTime int64 `protobuf:"varint,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + Limit int64 `protobuf:"varint,6,opt,name=limit,proto3" json:"limit,omitempty"` +} + +func (x *QueryKLinesRequest) Reset() { + *x = QueryKLinesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryKLinesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryKLinesRequest) ProtoMessage() {} + +func (x *QueryKLinesRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryKLinesRequest.ProtoReflect.Descriptor instead. 
+func (*QueryKLinesRequest) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{24} +} + +func (x *QueryKLinesRequest) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *QueryKLinesRequest) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *QueryKLinesRequest) GetInterval() string { + if x != nil { + return x.Interval + } + return "" +} + +func (x *QueryKLinesRequest) GetStartTime() int64 { + if x != nil { + return x.StartTime + } + return 0 +} + +func (x *QueryKLinesRequest) GetEndTime() int64 { + if x != nil { + return x.EndTime + } + return 0 +} + +func (x *QueryKLinesRequest) GetLimit() int64 { + if x != nil { + return x.Limit + } + return 0 +} + +type QueryKLinesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Klines []*KLine `protobuf:"bytes,1,rep,name=klines,proto3" json:"klines,omitempty"` + Error *Error `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *QueryKLinesResponse) Reset() { + *x = QueryKLinesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryKLinesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryKLinesResponse) ProtoMessage() {} + +func (x *QueryKLinesResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryKLinesResponse.ProtoReflect.Descriptor instead. 
+func (*QueryKLinesResponse) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{25} +} + +func (x *QueryKLinesResponse) GetKlines() []*KLine { + if x != nil { + return x.Klines + } + return nil +} + +func (x *QueryKLinesResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + +type KLine struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + Exchange string `protobuf:"bytes,2,opt,name=exchange,proto3" json:"exchange,omitempty"` + Symbol string `protobuf:"bytes,3,opt,name=symbol,proto3" json:"symbol,omitempty"` + Open string `protobuf:"bytes,4,opt,name=open,proto3" json:"open,omitempty"` + High string `protobuf:"bytes,5,opt,name=high,proto3" json:"high,omitempty"` + Low string `protobuf:"bytes,6,opt,name=low,proto3" json:"low,omitempty"` + Close string `protobuf:"bytes,7,opt,name=close,proto3" json:"close,omitempty"` + Volume string `protobuf:"bytes,8,opt,name=volume,proto3" json:"volume,omitempty"` + QuoteVolume string `protobuf:"bytes,9,opt,name=quote_volume,json=quoteVolume,proto3" json:"quote_volume,omitempty"` + StartTime int64 `protobuf:"varint,10,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + EndTime int64 `protobuf:"varint,11,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + Closed bool `protobuf:"varint,12,opt,name=closed,proto3" json:"closed,omitempty"` +} + +func (x *KLine) Reset() { + *x = KLine{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_pb_bbgo_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *KLine) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*KLine) ProtoMessage() {} + +func (x *KLine) ProtoReflect() protoreflect.Message { + mi := &file_pkg_pb_bbgo_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use KLine.ProtoReflect.Descriptor instead. 
+func (*KLine) Descriptor() ([]byte, []int) { + return file_pkg_pb_bbgo_proto_rawDescGZIP(), []int{26} +} + +func (x *KLine) GetSession() string { + if x != nil { + return x.Session + } + return "" +} + +func (x *KLine) GetExchange() string { + if x != nil { + return x.Exchange + } + return "" +} + +func (x *KLine) GetSymbol() string { + if x != nil { + return x.Symbol + } + return "" +} + +func (x *KLine) GetOpen() string { + if x != nil { + return x.Open + } + return "" +} + +func (x *KLine) GetHigh() string { + if x != nil { + return x.High + } + return "" +} + +func (x *KLine) GetLow() string { + if x != nil { + return x.Low + } + return "" +} + +func (x *KLine) GetClose() string { + if x != nil { + return x.Close + } + return "" +} + +func (x *KLine) GetVolume() string { + if x != nil { + return x.Volume + } + return "" +} + +func (x *KLine) GetQuoteVolume() string { + if x != nil { + return x.QuoteVolume + } + return "" +} + +func (x *KLine) GetStartTime() int64 { + if x != nil { + return x.StartTime + } + return 0 +} + +func (x *KLine) GetEndTime() int64 { + if x != nil { + return x.EndTime + } + return 0 +} + +func (x *KLine) GetClosed() bool { + if x != nil { + return x.Closed + } + return false +} + +var File_pkg_pb_bbgo_proto protoreflect.FileDescriptor + +var file_pkg_pb_bbgo_proto_rawDesc = []byte{ + 0x0a, 0x11, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x62, 0x62, 0x67, 0x6f, 0x22, 0x07, 0x0a, 0x05, 0x45, 0x6d, 0x70, + 0x74, 0x79, 0x22, 0x4b, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x1d, 0x0a, 0x0a, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x09, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, + 0x2b, 0x0a, 0x0f, 0x55, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x81, 0x02, 0x0a, + 0x08, 0x55, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, + 0x27, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x0d, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x52, + 0x07, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x12, 0x21, 0x0a, 0x05, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x52, 0x05, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x29, 0x0a, 0x08, 0x62, + 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0d, 0x2e, + 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x52, 0x08, 0x62, 0x61, + 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x23, 0x0a, 0x06, 0x74, 0x72, 0x61, 0x64, 0x65, 0x73, + 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 
0x62, 0x67, 0x6f, 0x2e, 0x54, 0x72, + 0x61, 0x64, 0x65, 0x52, 0x06, 0x74, 0x72, 0x61, 0x64, 0x65, 0x73, 0x12, 0x23, 0x0a, 0x06, 0x6f, + 0x72, 0x64, 0x65, 0x72, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, + 0x67, 0x6f, 0x2e, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x06, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x73, + 0x22, 0x4c, 0x0a, 0x10, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x0d, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x62, 0x62, + 0x67, 0x6f, 0x2e, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x0d, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x9d, + 0x01, 0x0a, 0x0c, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x27, 0x0a, 0x07, 0x63, + 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0d, 0x2e, 0x62, + 0x62, 0x67, 0x6f, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x52, 0x07, 0x63, 0x68, 0x61, + 0x6e, 0x6e, 0x65, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x12, 0x14, 0x0a, 0x05, + 0x64, 0x65, 0x70, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x64, 0x65, 0x70, + 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x22, 0xff, + 0x02, 0x0a, 0x0a, 0x4d, 0x61, 0x72, 0x6b, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x12, 0x18, 0x0a, + 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, + 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, + 0x6e, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x12, 0x27, 0x0a, 0x07, 0x63, + 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0d, 0x2e, 0x62, + 0x62, 0x67, 0x6f, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x52, 0x07, 0x63, 0x68, 0x61, + 0x6e, 0x6e, 0x65, 0x6c, 0x12, 0x21, 0x0a, 0x05, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x0e, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x52, 0x05, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x21, 0x0a, 0x05, 0x64, 0x65, 0x70, 0x74, 0x68, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x44, 0x65, + 0x70, 0x74, 0x68, 0x52, 0x05, 0x64, 0x65, 0x70, 0x74, 0x68, 0x12, 0x21, 0x0a, 0x05, 0x6b, 0x6c, + 0x69, 0x6e, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, + 0x2e, 0x4b, 0x4c, 0x69, 0x6e, 0x65, 0x52, 0x05, 0x6b, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x24, 0x0a, + 0x06, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x72, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, + 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x72, 0x52, 0x06, 0x74, 0x69, 0x63, + 0x6b, 0x65, 0x72, 0x12, 0x23, 0x0a, 0x06, 0x74, 0x72, 0x61, 0x64, 0x65, 0x73, 0x18, 
0x08, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x54, 0x72, 0x61, 0x64, 0x65, + 0x52, 0x06, 0x74, 0x72, 0x61, 0x64, 0x65, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x73, 0x75, 0x62, 0x73, + 0x63, 0x72, 0x69, 0x62, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0c, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x64, 0x41, 0x74, 0x12, 0x21, 0x0a, + 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, + 0x62, 0x67, 0x6f, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x22, 0x89, 0x01, 0x0a, 0x05, 0x44, 0x65, 0x70, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, + 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, + 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x12, 0x25, + 0x0a, 0x04, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x62, + 0x62, 0x67, 0x6f, 0x2e, 0x50, 0x72, 0x69, 0x63, 0x65, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, + 0x04, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x25, 0x0a, 0x04, 0x62, 0x69, 0x64, 0x73, 0x18, 0x04, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x50, 0x72, 0x69, 0x63, 0x65, + 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, 0x04, 0x62, 0x69, 0x64, 0x73, 0x22, 0x3b, 0x0a, 0x0b, + 0x50, 0x72, 0x69, 0x63, 0x65, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x70, + 0x72, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x70, 0x72, 0x69, 0x63, + 0x65, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x22, 0xa1, 0x02, 0x0a, 0x05, 0x54, 0x72, + 0x61, 0x64, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, + 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, + 0x62, 0x6f, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, + 0x6c, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x72, 0x69, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x70, 0x72, 0x69, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, + 0x69, 0x74, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, + 0x69, 0x74, 0x79, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, + 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x41, 0x74, 0x12, 0x1e, 0x0a, 0x04, 0x73, 0x69, 0x64, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x0a, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x53, 0x69, 0x64, 0x65, 0x52, 0x04, 0x73, 0x69, + 0x64, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x65, 0x65, 0x5f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x63, 0x79, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x65, 0x65, 0x43, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x66, 0x65, 0x65, 0x18, 0x0a, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x66, 0x65, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6d, 0x61, 0x6b, 0x65, 0x72, + 0x18, 0x0b, 
0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x6d, 0x61, 0x6b, 0x65, 0x72, 0x22, 0xa4, 0x01, + 0x0a, 0x06, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x72, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x63, 0x68, + 0x61, 0x6e, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x63, 0x68, + 0x61, 0x6e, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x12, 0x12, 0x0a, 0x04, + 0x6f, 0x70, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x04, 0x6f, 0x70, 0x65, 0x6e, + 0x12, 0x12, 0x0a, 0x04, 0x68, 0x69, 0x67, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x04, + 0x68, 0x69, 0x67, 0x68, 0x12, 0x10, 0x0a, 0x03, 0x6c, 0x6f, 0x77, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x01, 0x52, 0x03, 0x6c, 0x6f, 0x77, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6c, 0x6f, 0x73, 0x65, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x63, 0x6c, 0x6f, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, + 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x01, 0x52, 0x06, 0x76, 0x6f, + 0x6c, 0x75, 0x6d, 0x65, 0x22, 0x93, 0x03, 0x0a, 0x05, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x1a, + 0x0a, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, + 0x6d, 0x62, 0x6f, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, + 0x6f, 0x6c, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x1e, 0x0a, 0x04, 0x73, 0x69, 0x64, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x0a, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x53, 0x69, 0x64, 0x65, 0x52, 0x04, 0x73, 0x69, + 0x64, 0x65, 0x12, 0x2e, 0x0a, 0x0a, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0f, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x72, 0x69, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x70, 0x72, 0x69, 0x63, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x6f, 0x70, + 0x5f, 0x70, 0x72, 0x69, 0x63, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, + 0x6f, 0x70, 0x50, 0x72, 0x69, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, + 0x1a, 0x0a, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x18, 0x0b, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x2b, 0x0a, 0x11, 0x65, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x64, 0x5f, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x74, 0x79, + 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x64, + 0x51, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x26, 0x0a, 0x0f, 0x63, 0x6c, 0x69, 0x65, + 0x6e, 0x74, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x0e, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0d, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x49, 0x64, + 0x12, 0x19, 0x0a, 0x08, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x69, 0x64, 0x18, 0x0f, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x07, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 
0x65, 0x64, 0x41, 0x74, 0x22, 0xbf, 0x02, 0x0a, 0x0b, 0x53, + 0x75, 0x62, 0x6d, 0x69, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, + 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x12, 0x1e, 0x0a, 0x04, 0x73, 0x69, 0x64, 0x65, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0a, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x53, 0x69, + 0x64, 0x65, 0x52, 0x04, 0x73, 0x69, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x72, 0x69, 0x63, + 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x70, 0x72, 0x69, 0x63, 0x65, 0x12, 0x1a, + 0x0a, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, + 0x6f, 0x70, 0x5f, 0x70, 0x72, 0x69, 0x63, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x73, 0x74, 0x6f, 0x70, 0x50, 0x72, 0x69, 0x63, 0x65, 0x12, 0x2e, 0x0a, 0x0a, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0f, 0x2e, + 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, + 0x6f, 0x72, 0x64, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x63, 0x6c, 0x69, + 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x09, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0d, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x49, + 0x64, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x07, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x49, 0x64, 0x22, 0xad, 0x01, 0x0a, + 0x07, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x1a, + 0x0a, 0x08, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x1c, 0x0a, 0x09, 0x61, 0x76, + 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x61, + 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x6f, 0x63, 0x6b, + 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, + 0x12, 0x1a, 0x0a, 0x08, 0x62, 0x6f, 0x72, 0x72, 0x6f, 0x77, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x62, 0x6f, 0x72, 0x72, 0x6f, 0x77, 0x65, 0x64, 0x22, 0x66, 0x0a, 0x12, + 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x36, 0x0a, 0x0d, + 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x62, 0x62, 0x67, 0x6f, 
0x2e, 0x53, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x0c, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x73, 0x22, 0x77, 0x0a, 0x13, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x06, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x52, 0x06, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x73, 0x12, 0x21, 0x0a, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, + 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x71, 0x0a, + 0x12, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x19, 0x0a, + 0x08, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x0f, 0x63, 0x6c, 0x69, 0x65, + 0x6e, 0x74, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0d, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x49, 0x64, + 0x22, 0x5b, 0x0a, 0x13, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x21, 0x0a, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, + 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x65, 0x0a, + 0x11, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x26, 0x0a, 0x0f, + 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x49, 0x64, 0x22, 0x5a, 0x0a, 0x12, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x6f, 0x72, + 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, + 0x2e, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x21, 0x0a, + 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, + 0x62, 0x67, 0x6f, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x22, 0xf4, 0x01, 0x0a, 0x12, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, 
0x73, 0x69, + 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, + 0x19, 0x0a, 0x08, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x62, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, 0x79, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x72, + 0x6f, 0x75, 0x70, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x67, 0x72, + 0x6f, 0x75, 0x70, 0x49, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x70, 0x61, 0x67, 0x69, 0x6e, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67, 0x65, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, + 0x69, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, + 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x22, 0x5d, 0x0a, 0x13, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x4f, 0x72, 0x64, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, + 0x0a, 0x06, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, + 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x06, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x73, 0x12, 0x21, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, + 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x87, 0x02, 0x0a, 0x12, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x54, 0x72, 0x61, 0x64, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, + 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, + 0x62, 0x6f, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, + 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, + 0x12, 0x0a, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x66, + 0x72, 0x6f, 0x6d, 0x12, 0x0e, 0x0a, 0x02, 0x74, 0x6f, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x02, 0x74, 0x6f, 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x62, 0x79, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, 0x79, 0x12, 0x1e, + 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x0a, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, + 0x0a, 0x04, 0x70, 0x61, 0x67, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x70, 0x61, + 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, + 0x65, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, + 0x22, 0x5d, 
0x0a, 0x13, 0x51, 0x75, 0x65, 0x72, 0x79, 0x54, 0x72, 0x61, 0x64, 0x65, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x06, 0x74, 0x72, 0x61, 0x64, 0x65, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x54, + 0x72, 0x61, 0x64, 0x65, 0x52, 0x06, 0x74, 0x72, 0x61, 0x64, 0x65, 0x73, 0x12, 0x21, 0x0a, 0x05, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, + 0x67, 0x6f, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0xb4, 0x01, 0x0a, 0x12, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x4c, 0x69, 0x6e, 0x65, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x63, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, + 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, + 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x5d, 0x0a, 0x13, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, + 0x4c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, + 0x06, 0x6b, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, + 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x4b, 0x4c, 0x69, 0x6e, 0x65, 0x52, 0x06, 0x6b, 0x6c, 0x69, 0x6e, + 0x65, 0x73, 0x12, 0x21, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x0b, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0xb2, 0x02, 0x0a, 0x05, 0x4b, 0x4c, 0x69, 0x6e, 0x65, 0x12, + 0x18, 0x0a, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x63, + 0x68, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x63, + 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x12, 0x12, 0x0a, + 0x04, 0x6f, 0x70, 0x65, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6f, 0x70, 0x65, + 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x69, 0x67, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x68, 0x69, 0x67, 0x68, 0x12, 0x10, 0x0a, 0x03, 0x6c, 0x6f, 0x77, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x6c, 0x6f, 0x77, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6c, 0x6f, 0x73, 0x65, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x63, 0x6c, 0x6f, 0x73, 0x65, 0x12, 0x16, 0x0a, + 0x06, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x76, + 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x71, 0x75, 0x6f, 0x74, 0x65, 0x5f, 0x76, + 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 
0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x71, 0x75, 0x6f, + 0x74, 0x65, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, + 0x69, 0x6d, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, + 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6c, 0x6f, 0x73, 0x65, 0x64, 0x18, 0x0c, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x06, 0x63, 0x6c, 0x6f, 0x73, 0x65, 0x64, 0x2a, 0x6e, 0x0a, 0x05, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, + 0x12, 0x0e, 0x0a, 0x0a, 0x53, 0x55, 0x42, 0x53, 0x43, 0x52, 0x49, 0x42, 0x45, 0x44, 0x10, 0x01, + 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x4e, 0x53, 0x55, 0x42, 0x53, 0x43, 0x52, 0x49, 0x42, 0x45, 0x44, + 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x4e, 0x41, 0x50, 0x53, 0x48, 0x4f, 0x54, 0x10, 0x03, + 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x10, 0x04, 0x12, 0x11, 0x0a, 0x0d, + 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x05, 0x12, + 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x63, 0x2a, 0x4d, 0x0a, 0x07, 0x43, 0x68, + 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x12, 0x08, 0x0a, 0x04, 0x42, 0x4f, 0x4f, 0x4b, 0x10, 0x00, 0x12, + 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, 0x44, 0x45, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x54, 0x49, + 0x43, 0x4b, 0x45, 0x52, 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x4b, 0x4c, 0x49, 0x4e, 0x45, 0x10, + 0x03, 0x12, 0x0b, 0x0a, 0x07, 0x42, 0x41, 0x4c, 0x41, 0x4e, 0x43, 0x45, 0x10, 0x04, 0x12, 0x09, + 0x0a, 0x05, 0x4f, 0x52, 0x44, 0x45, 0x52, 0x10, 0x05, 0x2a, 0x19, 0x0a, 0x04, 0x53, 0x69, 0x64, + 0x65, 0x12, 0x07, 0x0a, 0x03, 0x42, 0x55, 0x59, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x45, + 0x4c, 0x4c, 0x10, 0x01, 0x2a, 0x61, 0x0a, 0x09, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x54, 0x79, 0x70, + 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x41, 0x52, 0x4b, 0x45, 0x54, 0x10, 0x00, 0x12, 0x09, 0x0a, + 0x05, 0x4c, 0x49, 0x4d, 0x49, 0x54, 0x10, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x4f, 0x50, + 0x5f, 0x4d, 0x41, 0x52, 0x4b, 0x45, 0x54, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x53, 0x54, 0x4f, + 0x50, 0x5f, 0x4c, 0x49, 0x4d, 0x49, 0x54, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x50, 0x4f, 0x53, + 0x54, 0x5f, 0x4f, 0x4e, 0x4c, 0x59, 0x10, 0x04, 0x12, 0x0d, 0x0a, 0x09, 0x49, 0x4f, 0x43, 0x5f, + 0x4c, 0x49, 0x4d, 0x49, 0x54, 0x10, 0x05, 0x32, 0x94, 0x01, 0x0a, 0x11, 0x4d, 0x61, 0x72, 0x6b, + 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x39, 0x0a, + 0x09, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x12, 0x16, 0x2e, 0x62, 0x62, 0x67, + 0x6f, 0x2e, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x10, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x4d, 0x61, 0x72, 0x6b, 0x65, 0x74, + 0x44, 0x61, 0x74, 0x61, 0x22, 0x00, 0x30, 0x01, 0x12, 0x44, 0x0a, 0x0b, 0x51, 0x75, 0x65, 0x72, + 0x79, 0x4b, 0x4c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x18, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x51, + 0x75, 0x65, 0x72, 0x79, 0x4b, 0x4c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x19, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x4c, + 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x32, 0x49, + 0x0a, 0x0f, 0x55, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 
0x53, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x12, 0x36, 0x0a, 0x09, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x12, 0x15, + 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x55, 0x73, 0x65, + 0x72, 0x44, 0x61, 0x74, 0x61, 0x22, 0x00, 0x30, 0x01, 0x32, 0xeb, 0x02, 0x0a, 0x0e, 0x54, 0x72, + 0x61, 0x64, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x44, 0x0a, 0x0b, + 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x18, 0x2e, 0x62, 0x62, + 0x67, 0x6f, 0x2e, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x53, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0b, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4f, 0x72, 0x64, 0x65, + 0x72, 0x12, 0x18, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4f, + 0x72, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x62, 0x62, + 0x67, 0x6f, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x41, 0x0a, 0x0a, 0x51, 0x75, 0x65, 0x72, + 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x17, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x18, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, + 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0b, 0x51, + 0x75, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x73, 0x12, 0x18, 0x2e, 0x62, 0x62, 0x67, + 0x6f, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x51, 0x75, 0x65, 0x72, + 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x00, 0x12, 0x44, 0x0a, 0x0b, 0x51, 0x75, 0x65, 0x72, 0x79, 0x54, 0x72, 0x61, 0x64, 0x65, 0x73, + 0x12, 0x18, 0x2e, 0x62, 0x62, 0x67, 0x6f, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x54, 0x72, 0x61, + 0x64, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x62, 0x62, 0x67, + 0x6f, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x54, 0x72, 0x61, 0x64, 0x65, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x07, 0x5a, 0x05, 0x2e, 0x2e, 0x2f, 0x70, 0x62, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_pkg_pb_bbgo_proto_rawDescOnce sync.Once + file_pkg_pb_bbgo_proto_rawDescData = file_pkg_pb_bbgo_proto_rawDesc +) + +func file_pkg_pb_bbgo_proto_rawDescGZIP() []byte { + file_pkg_pb_bbgo_proto_rawDescOnce.Do(func() { + file_pkg_pb_bbgo_proto_rawDescData = protoimpl.X.CompressGZIP(file_pkg_pb_bbgo_proto_rawDescData) + }) + return file_pkg_pb_bbgo_proto_rawDescData +} + +var file_pkg_pb_bbgo_proto_enumTypes = make([]protoimpl.EnumInfo, 4) +var file_pkg_pb_bbgo_proto_msgTypes = make([]protoimpl.MessageInfo, 27) +var file_pkg_pb_bbgo_proto_goTypes = []interface{}{ + (Event)(0), // 0: bbgo.Event + (Channel)(0), // 1: bbgo.Channel + (Side)(0), // 2: bbgo.Side + (OrderType)(0), // 3: bbgo.OrderType + (*Empty)(nil), // 4: bbgo.Empty + (*Error)(nil), // 5: bbgo.Error + (*UserDataRequest)(nil), // 6: 
bbgo.UserDataRequest + (*UserData)(nil), // 7: bbgo.UserData + (*SubscribeRequest)(nil), // 8: bbgo.SubscribeRequest + (*Subscription)(nil), // 9: bbgo.Subscription + (*MarketData)(nil), // 10: bbgo.MarketData + (*Depth)(nil), // 11: bbgo.Depth + (*PriceVolume)(nil), // 12: bbgo.PriceVolume + (*Trade)(nil), // 13: bbgo.Trade + (*Ticker)(nil), // 14: bbgo.Ticker + (*Order)(nil), // 15: bbgo.Order + (*SubmitOrder)(nil), // 16: bbgo.SubmitOrder + (*Balance)(nil), // 17: bbgo.Balance + (*SubmitOrderRequest)(nil), // 18: bbgo.SubmitOrderRequest + (*SubmitOrderResponse)(nil), // 19: bbgo.SubmitOrderResponse + (*CancelOrderRequest)(nil), // 20: bbgo.CancelOrderRequest + (*CancelOrderResponse)(nil), // 21: bbgo.CancelOrderResponse + (*QueryOrderRequest)(nil), // 22: bbgo.QueryOrderRequest + (*QueryOrderResponse)(nil), // 23: bbgo.QueryOrderResponse + (*QueryOrdersRequest)(nil), // 24: bbgo.QueryOrdersRequest + (*QueryOrdersResponse)(nil), // 25: bbgo.QueryOrdersResponse + (*QueryTradesRequest)(nil), // 26: bbgo.QueryTradesRequest + (*QueryTradesResponse)(nil), // 27: bbgo.QueryTradesResponse + (*QueryKLinesRequest)(nil), // 28: bbgo.QueryKLinesRequest + (*QueryKLinesResponse)(nil), // 29: bbgo.QueryKLinesResponse + (*KLine)(nil), // 30: bbgo.KLine +} +var file_pkg_pb_bbgo_proto_depIdxs = []int32{ + 1, // 0: bbgo.UserData.channel:type_name -> bbgo.Channel + 0, // 1: bbgo.UserData.event:type_name -> bbgo.Event + 17, // 2: bbgo.UserData.balances:type_name -> bbgo.Balance + 13, // 3: bbgo.UserData.trades:type_name -> bbgo.Trade + 15, // 4: bbgo.UserData.orders:type_name -> bbgo.Order + 9, // 5: bbgo.SubscribeRequest.subscriptions:type_name -> bbgo.Subscription + 1, // 6: bbgo.Subscription.channel:type_name -> bbgo.Channel + 1, // 7: bbgo.MarketData.channel:type_name -> bbgo.Channel + 0, // 8: bbgo.MarketData.event:type_name -> bbgo.Event + 11, // 9: bbgo.MarketData.depth:type_name -> bbgo.Depth + 30, // 10: bbgo.MarketData.kline:type_name -> bbgo.KLine + 14, // 11: bbgo.MarketData.ticker:type_name -> bbgo.Ticker + 13, // 12: bbgo.MarketData.trades:type_name -> bbgo.Trade + 5, // 13: bbgo.MarketData.error:type_name -> bbgo.Error + 12, // 14: bbgo.Depth.asks:type_name -> bbgo.PriceVolume + 12, // 15: bbgo.Depth.bids:type_name -> bbgo.PriceVolume + 2, // 16: bbgo.Trade.side:type_name -> bbgo.Side + 2, // 17: bbgo.Order.side:type_name -> bbgo.Side + 3, // 18: bbgo.Order.order_type:type_name -> bbgo.OrderType + 2, // 19: bbgo.SubmitOrder.side:type_name -> bbgo.Side + 3, // 20: bbgo.SubmitOrder.order_type:type_name -> bbgo.OrderType + 16, // 21: bbgo.SubmitOrderRequest.submit_orders:type_name -> bbgo.SubmitOrder + 15, // 22: bbgo.SubmitOrderResponse.orders:type_name -> bbgo.Order + 5, // 23: bbgo.SubmitOrderResponse.error:type_name -> bbgo.Error + 15, // 24: bbgo.CancelOrderResponse.order:type_name -> bbgo.Order + 5, // 25: bbgo.CancelOrderResponse.error:type_name -> bbgo.Error + 15, // 26: bbgo.QueryOrderResponse.order:type_name -> bbgo.Order + 5, // 27: bbgo.QueryOrderResponse.error:type_name -> bbgo.Error + 15, // 28: bbgo.QueryOrdersResponse.orders:type_name -> bbgo.Order + 5, // 29: bbgo.QueryOrdersResponse.error:type_name -> bbgo.Error + 13, // 30: bbgo.QueryTradesResponse.trades:type_name -> bbgo.Trade + 5, // 31: bbgo.QueryTradesResponse.error:type_name -> bbgo.Error + 30, // 32: bbgo.QueryKLinesResponse.klines:type_name -> bbgo.KLine + 5, // 33: bbgo.QueryKLinesResponse.error:type_name -> bbgo.Error + 8, // 34: bbgo.MarketDataService.Subscribe:input_type -> bbgo.SubscribeRequest + 28, // 35: 
bbgo.MarketDataService.QueryKLines:input_type -> bbgo.QueryKLinesRequest + 6, // 36: bbgo.UserDataService.Subscribe:input_type -> bbgo.UserDataRequest + 18, // 37: bbgo.TradingService.SubmitOrder:input_type -> bbgo.SubmitOrderRequest + 20, // 38: bbgo.TradingService.CancelOrder:input_type -> bbgo.CancelOrderRequest + 22, // 39: bbgo.TradingService.QueryOrder:input_type -> bbgo.QueryOrderRequest + 24, // 40: bbgo.TradingService.QueryOrders:input_type -> bbgo.QueryOrdersRequest + 26, // 41: bbgo.TradingService.QueryTrades:input_type -> bbgo.QueryTradesRequest + 10, // 42: bbgo.MarketDataService.Subscribe:output_type -> bbgo.MarketData + 29, // 43: bbgo.MarketDataService.QueryKLines:output_type -> bbgo.QueryKLinesResponse + 7, // 44: bbgo.UserDataService.Subscribe:output_type -> bbgo.UserData + 19, // 45: bbgo.TradingService.SubmitOrder:output_type -> bbgo.SubmitOrderResponse + 21, // 46: bbgo.TradingService.CancelOrder:output_type -> bbgo.CancelOrderResponse + 23, // 47: bbgo.TradingService.QueryOrder:output_type -> bbgo.QueryOrderResponse + 25, // 48: bbgo.TradingService.QueryOrders:output_type -> bbgo.QueryOrdersResponse + 27, // 49: bbgo.TradingService.QueryTrades:output_type -> bbgo.QueryTradesResponse + 42, // [42:50] is the sub-list for method output_type + 34, // [34:42] is the sub-list for method input_type + 34, // [34:34] is the sub-list for extension type_name + 34, // [34:34] is the sub-list for extension extendee + 0, // [0:34] is the sub-list for field type_name +} + +func init() { file_pkg_pb_bbgo_proto_init() } +func file_pkg_pb_bbgo_proto_init() { + if File_pkg_pb_bbgo_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_pkg_pb_bbgo_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Empty); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Error); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UserDataRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UserData); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SubscribeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Subscription); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MarketData); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Depth); i { 
+ case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PriceVolume); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Trade); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Ticker); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Order); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SubmitOrder); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Balance); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SubmitOrderRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SubmitOrderResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CancelOrderRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CancelOrderResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryOrderRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryOrderResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryOrdersRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + 
switch v := v.(*QueryOrdersResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryTradesRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryTradesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryKLinesRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryKLinesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_pb_bbgo_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*KLine); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_pkg_pb_bbgo_proto_rawDesc, + NumEnums: 4, + NumMessages: 27, + NumExtensions: 0, + NumServices: 3, + }, + GoTypes: file_pkg_pb_bbgo_proto_goTypes, + DependencyIndexes: file_pkg_pb_bbgo_proto_depIdxs, + EnumInfos: file_pkg_pb_bbgo_proto_enumTypes, + MessageInfos: file_pkg_pb_bbgo_proto_msgTypes, + }.Build() + File_pkg_pb_bbgo_proto = out.File + file_pkg_pb_bbgo_proto_rawDesc = nil + file_pkg_pb_bbgo_proto_goTypes = nil + file_pkg_pb_bbgo_proto_depIdxs = nil +} diff --git a/pkg/pb/bbgo.proto b/pkg/pb/bbgo.proto new file mode 100644 index 0000000000..bbe1ab5e26 --- /dev/null +++ b/pkg/pb/bbgo.proto @@ -0,0 +1,280 @@ +syntax = "proto3"; + +package bbgo; + +option go_package = "../pb"; + +service MarketDataService { + rpc Subscribe(SubscribeRequest) returns (stream MarketData) {} + rpc QueryKLines(QueryKLinesRequest) returns (QueryKLinesResponse) {} +} + +service UserDataService { + rpc Subscribe(UserDataRequest) returns (stream UserData) {} +} + +service TradingService { + // request-response + rpc SubmitOrder(SubmitOrderRequest) returns (SubmitOrderResponse) {} + rpc CancelOrder(CancelOrderRequest) returns (CancelOrderResponse) {} + rpc QueryOrder(QueryOrderRequest) returns (QueryOrderResponse) {} + rpc QueryOrders(QueryOrdersRequest) returns (QueryOrdersResponse) {} + rpc QueryTrades(QueryTradesRequest) returns (QueryTradesResponse) {} +} + +enum Event { + UNKNOWN = 0; + SUBSCRIBED = 1; + UNSUBSCRIBED = 2; + SNAPSHOT = 3; + UPDATE = 4; + AUTHENTICATED = 5; + ERROR = 99; +} + +enum Channel { + BOOK = 0; + TRADE = 1; + TICKER = 2; + KLINE = 3; + BALANCE = 4; + ORDER = 5; +} + +enum Side { + BUY = 0; + SELL = 1; +} + +enum OrderType { + MARKET = 0; + LIMIT = 1; + STOP_MARKET = 2; + STOP_LIMIT = 3; + POST_ONLY = 4; + IOC_LIMIT = 5; +} + +message Empty {} + +message Error { + int64 error_code = 1; + string error_message = 2; +} + +message UserDataRequest { + string session = 1; +} + +message UserData { + 
string session = 1; + string exchange = 2; + Channel channel = 3; // trade, order, balance + Event event = 4; // snapshot, update ... + repeated Balance balances = 5; + repeated Trade trades = 6; + repeated Order orders = 7; +} + +message SubscribeRequest { + repeated Subscription subscriptions = 1; +} + +message Subscription { + string exchange = 1; + Channel channel = 2; // book, trade, ticker + string symbol = 3; + string depth = 4; // depth is for book, valid values are full, medium, 1, 5 and 20 + string interval = 5; // interval is for kline channel +} + +message MarketData { + string session = 1; + string exchange = 2; + string symbol = 3; + Channel channel = 4; // book, trade, ticker, user + Event event = 5; // snapshot or update + Depth depth = 6; // depth: used by book + KLine kline = 7; + Ticker ticker = 9; // market ticker + repeated Trade trades = 8; // market trades + int64 subscribed_at = 12; + Error error = 13; +} + + +message Depth { + string exchange = 1; + string symbol = 2; + repeated PriceVolume asks = 3; + repeated PriceVolume bids = 4; +} + +message PriceVolume { + string price = 1; + string volume = 2; +} + +// https://maicoin.github.io/max-websocket-docs/#/private_channels?id=trade-response +// https://maicoin.github.io/max-websocket-docs/#/public_trade?id=success-response +message Trade { + string session = 1; + string exchange = 2; + string symbol = 3; + string id = 4; + string price = 5; + string quantity = 6; + int64 created_at = 7; + Side side = 8; + string fee_currency = 9; + string fee = 10; + bool maker = 11; +} + +// https://maicoin.github.io/max-websocket-docs/#/public_ticker?id=success-response +message Ticker { + string exchange = 1; + string symbol = 2; + double open = 3; + double high = 4; + double low = 5; + double close = 6; + double volume = 7; +} + +// https://maicoin.github.io/max-websocket-docs/#/private_channels?id=snapshot +message Order { + string exchange = 1; + string symbol = 2; + string id = 3; + Side side = 4; + OrderType order_type = 5; + string price = 6; + string stop_price = 7; + string status = 9; + string quantity = 11; + string executed_quantity = 12; + string client_order_id = 14; + int64 group_id = 15; + int64 created_at = 10; +} + +message SubmitOrder { + string session = 1; + string exchange = 2; + string symbol = 3; + Side side = 4; + string price = 6; + string quantity = 5; + string stop_price = 7; + OrderType order_type = 8; + string client_order_id = 9; + int64 group_id = 10; +} + +// https://maicoin.github.io/max-websocket-docs/#/private_channels?id=account-response +message Balance { + string session = 1; + string exchange = 2; + string currency = 3; + string available = 4; + string locked = 5; + string borrowed = 6; +} + +message SubmitOrderRequest { + string session = 1; + repeated SubmitOrder submit_orders = 2; +} + +message SubmitOrderResponse { + string session = 1; + repeated Order orders = 2; + Error error = 3; +} + +message CancelOrderRequest { + string session = 1; + string order_id = 2; + string client_order_id = 3; +} + +message CancelOrderResponse { + Order order = 1; + Error error = 2; +} + +message QueryOrderRequest { + string session = 1; + string id = 2; + string client_order_id = 3; +} + +message QueryOrderResponse { + Order order = 1; + Error error = 2; +} + +message QueryOrdersRequest { + string session = 1; + string symbol = 2; + repeated string state = 3; + string order_by = 4; + int64 group_id = 5; + bool pagination = 6; + int64 page = 7; + int64 limit = 8; + int64 offset = 9; +} + +message 
QueryOrdersResponse { + repeated Order orders = 1; + Error error = 2; +} + +message QueryTradesRequest { + string exchange = 1; + string symbol = 2; + int64 timestamp = 3; + int64 from = 4; + int64 to = 5; + string order_by = 6; + bool pagination = 7; + int64 page = 8; + int64 limit = 9; + int64 offset = 10; +} + +message QueryTradesResponse { + repeated Trade trades = 1; + Error error = 2; +} + +message QueryKLinesRequest { + string exchange = 1; + string symbol = 2; + string interval = 3; // time period of K line in minute + int64 start_time = 4; + int64 end_time = 5; + int64 limit = 6; +} + +message QueryKLinesResponse { + repeated KLine klines = 1; + Error error = 2; +} + +message KLine { + string session = 1; + string exchange = 2; + string symbol = 3; + string open = 4; + string high = 5; + string low = 6; + string close = 7; + string volume = 8; + string quote_volume = 9; + int64 start_time = 10; + int64 end_time = 11; + bool closed = 12; +} diff --git a/pkg/pb/bbgo_grpc.pb.go b/pkg/pb/bbgo_grpc.pb.go new file mode 100644 index 0000000000..cee6dc2e0b --- /dev/null +++ b/pkg/pb/bbgo_grpc.pb.go @@ -0,0 +1,510 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. + +package pb + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// MarketDataServiceClient is the client API for MarketDataService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type MarketDataServiceClient interface { + Subscribe(ctx context.Context, in *SubscribeRequest, opts ...grpc.CallOption) (MarketDataService_SubscribeClient, error) + QueryKLines(ctx context.Context, in *QueryKLinesRequest, opts ...grpc.CallOption) (*QueryKLinesResponse, error) +} + +type marketDataServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewMarketDataServiceClient(cc grpc.ClientConnInterface) MarketDataServiceClient { + return &marketDataServiceClient{cc} +} + +func (c *marketDataServiceClient) Subscribe(ctx context.Context, in *SubscribeRequest, opts ...grpc.CallOption) (MarketDataService_SubscribeClient, error) { + stream, err := c.cc.NewStream(ctx, &MarketDataService_ServiceDesc.Streams[0], "/bbgo.MarketDataService/Subscribe", opts...) + if err != nil { + return nil, err + } + x := &marketDataServiceSubscribeClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type MarketDataService_SubscribeClient interface { + Recv() (*MarketData, error) + grpc.ClientStream +} + +type marketDataServiceSubscribeClient struct { + grpc.ClientStream +} + +func (x *marketDataServiceSubscribeClient) Recv() (*MarketData, error) { + m := new(MarketData) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *marketDataServiceClient) QueryKLines(ctx context.Context, in *QueryKLinesRequest, opts ...grpc.CallOption) (*QueryKLinesResponse, error) { + out := new(QueryKLinesResponse) + err := c.cc.Invoke(ctx, "/bbgo.MarketDataService/QueryKLines", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// MarketDataServiceServer is the server API for MarketDataService service. +// All implementations must embed UnimplementedMarketDataServiceServer +// for forward compatibility +type MarketDataServiceServer interface { + Subscribe(*SubscribeRequest, MarketDataService_SubscribeServer) error + QueryKLines(context.Context, *QueryKLinesRequest) (*QueryKLinesResponse, error) + mustEmbedUnimplementedMarketDataServiceServer() +} + +// UnimplementedMarketDataServiceServer must be embedded to have forward compatible implementations. +type UnimplementedMarketDataServiceServer struct { +} + +func (UnimplementedMarketDataServiceServer) Subscribe(*SubscribeRequest, MarketDataService_SubscribeServer) error { + return status.Errorf(codes.Unimplemented, "method Subscribe not implemented") +} +func (UnimplementedMarketDataServiceServer) QueryKLines(context.Context, *QueryKLinesRequest) (*QueryKLinesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method QueryKLines not implemented") +} +func (UnimplementedMarketDataServiceServer) mustEmbedUnimplementedMarketDataServiceServer() {} + +// UnsafeMarketDataServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to MarketDataServiceServer will +// result in compilation errors. +type UnsafeMarketDataServiceServer interface { + mustEmbedUnimplementedMarketDataServiceServer() +} + +func RegisterMarketDataServiceServer(s grpc.ServiceRegistrar, srv MarketDataServiceServer) { + s.RegisterService(&MarketDataService_ServiceDesc, srv) +} + +func _MarketDataService_Subscribe_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(SubscribeRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(MarketDataServiceServer).Subscribe(m, &marketDataServiceSubscribeServer{stream}) +} + +type MarketDataService_SubscribeServer interface { + Send(*MarketData) error + grpc.ServerStream +} + +type marketDataServiceSubscribeServer struct { + grpc.ServerStream +} + +func (x *marketDataServiceSubscribeServer) Send(m *MarketData) error { + return x.ServerStream.SendMsg(m) +} + +func _MarketDataService_QueryKLines_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryKLinesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MarketDataServiceServer).QueryKLines(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/bbgo.MarketDataService/QueryKLines", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MarketDataServiceServer).QueryKLines(ctx, req.(*QueryKLinesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// MarketDataService_ServiceDesc is the grpc.ServiceDesc for MarketDataService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var MarketDataService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "bbgo.MarketDataService", + HandlerType: (*MarketDataServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "QueryKLines", + Handler: _MarketDataService_QueryKLines_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Subscribe", + Handler: _MarketDataService_Subscribe_Handler, + ServerStreams: true, + }, + }, + Metadata: "pkg/pb/bbgo.proto", +} + +// UserDataServiceClient is the client API for UserDataService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type UserDataServiceClient interface { + Subscribe(ctx context.Context, in *UserDataRequest, opts ...grpc.CallOption) (UserDataService_SubscribeClient, error) +} + +type userDataServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewUserDataServiceClient(cc grpc.ClientConnInterface) UserDataServiceClient { + return &userDataServiceClient{cc} +} + +func (c *userDataServiceClient) Subscribe(ctx context.Context, in *UserDataRequest, opts ...grpc.CallOption) (UserDataService_SubscribeClient, error) { + stream, err := c.cc.NewStream(ctx, &UserDataService_ServiceDesc.Streams[0], "/bbgo.UserDataService/Subscribe", opts...) + if err != nil { + return nil, err + } + x := &userDataServiceSubscribeClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type UserDataService_SubscribeClient interface { + Recv() (*UserData, error) + grpc.ClientStream +} + +type userDataServiceSubscribeClient struct { + grpc.ClientStream +} + +func (x *userDataServiceSubscribeClient) Recv() (*UserData, error) { + m := new(UserData) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// UserDataServiceServer is the server API for UserDataService service. +// All implementations must embed UnimplementedUserDataServiceServer +// for forward compatibility +type UserDataServiceServer interface { + Subscribe(*UserDataRequest, UserDataService_SubscribeServer) error + mustEmbedUnimplementedUserDataServiceServer() +} + +// UnimplementedUserDataServiceServer must be embedded to have forward compatible implementations. +type UnimplementedUserDataServiceServer struct { +} + +func (UnimplementedUserDataServiceServer) Subscribe(*UserDataRequest, UserDataService_SubscribeServer) error { + return status.Errorf(codes.Unimplemented, "method Subscribe not implemented") +} +func (UnimplementedUserDataServiceServer) mustEmbedUnimplementedUserDataServiceServer() {} + +// UnsafeUserDataServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to UserDataServiceServer will +// result in compilation errors. 
+type UnsafeUserDataServiceServer interface { + mustEmbedUnimplementedUserDataServiceServer() +} + +func RegisterUserDataServiceServer(s grpc.ServiceRegistrar, srv UserDataServiceServer) { + s.RegisterService(&UserDataService_ServiceDesc, srv) +} + +func _UserDataService_Subscribe_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(UserDataRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(UserDataServiceServer).Subscribe(m, &userDataServiceSubscribeServer{stream}) +} + +type UserDataService_SubscribeServer interface { + Send(*UserData) error + grpc.ServerStream +} + +type userDataServiceSubscribeServer struct { + grpc.ServerStream +} + +func (x *userDataServiceSubscribeServer) Send(m *UserData) error { + return x.ServerStream.SendMsg(m) +} + +// UserDataService_ServiceDesc is the grpc.ServiceDesc for UserDataService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var UserDataService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "bbgo.UserDataService", + HandlerType: (*UserDataServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Subscribe", + Handler: _UserDataService_Subscribe_Handler, + ServerStreams: true, + }, + }, + Metadata: "pkg/pb/bbgo.proto", +} + +// TradingServiceClient is the client API for TradingService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type TradingServiceClient interface { + // request-response + SubmitOrder(ctx context.Context, in *SubmitOrderRequest, opts ...grpc.CallOption) (*SubmitOrderResponse, error) + CancelOrder(ctx context.Context, in *CancelOrderRequest, opts ...grpc.CallOption) (*CancelOrderResponse, error) + QueryOrder(ctx context.Context, in *QueryOrderRequest, opts ...grpc.CallOption) (*QueryOrderResponse, error) + QueryOrders(ctx context.Context, in *QueryOrdersRequest, opts ...grpc.CallOption) (*QueryOrdersResponse, error) + QueryTrades(ctx context.Context, in *QueryTradesRequest, opts ...grpc.CallOption) (*QueryTradesResponse, error) +} + +type tradingServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewTradingServiceClient(cc grpc.ClientConnInterface) TradingServiceClient { + return &tradingServiceClient{cc} +} + +func (c *tradingServiceClient) SubmitOrder(ctx context.Context, in *SubmitOrderRequest, opts ...grpc.CallOption) (*SubmitOrderResponse, error) { + out := new(SubmitOrderResponse) + err := c.cc.Invoke(ctx, "/bbgo.TradingService/SubmitOrder", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tradingServiceClient) CancelOrder(ctx context.Context, in *CancelOrderRequest, opts ...grpc.CallOption) (*CancelOrderResponse, error) { + out := new(CancelOrderResponse) + err := c.cc.Invoke(ctx, "/bbgo.TradingService/CancelOrder", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tradingServiceClient) QueryOrder(ctx context.Context, in *QueryOrderRequest, opts ...grpc.CallOption) (*QueryOrderResponse, error) { + out := new(QueryOrderResponse) + err := c.cc.Invoke(ctx, "/bbgo.TradingService/QueryOrder", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *tradingServiceClient) QueryOrders(ctx context.Context, in *QueryOrdersRequest, opts ...grpc.CallOption) (*QueryOrdersResponse, error) { + out := new(QueryOrdersResponse) + err := c.cc.Invoke(ctx, "/bbgo.TradingService/QueryOrders", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tradingServiceClient) QueryTrades(ctx context.Context, in *QueryTradesRequest, opts ...grpc.CallOption) (*QueryTradesResponse, error) { + out := new(QueryTradesResponse) + err := c.cc.Invoke(ctx, "/bbgo.TradingService/QueryTrades", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TradingServiceServer is the server API for TradingService service. +// All implementations must embed UnimplementedTradingServiceServer +// for forward compatibility +type TradingServiceServer interface { + // request-response + SubmitOrder(context.Context, *SubmitOrderRequest) (*SubmitOrderResponse, error) + CancelOrder(context.Context, *CancelOrderRequest) (*CancelOrderResponse, error) + QueryOrder(context.Context, *QueryOrderRequest) (*QueryOrderResponse, error) + QueryOrders(context.Context, *QueryOrdersRequest) (*QueryOrdersResponse, error) + QueryTrades(context.Context, *QueryTradesRequest) (*QueryTradesResponse, error) + mustEmbedUnimplementedTradingServiceServer() +} + +// UnimplementedTradingServiceServer must be embedded to have forward compatible implementations. +type UnimplementedTradingServiceServer struct { +} + +func (UnimplementedTradingServiceServer) SubmitOrder(context.Context, *SubmitOrderRequest) (*SubmitOrderResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SubmitOrder not implemented") +} +func (UnimplementedTradingServiceServer) CancelOrder(context.Context, *CancelOrderRequest) (*CancelOrderResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CancelOrder not implemented") +} +func (UnimplementedTradingServiceServer) QueryOrder(context.Context, *QueryOrderRequest) (*QueryOrderResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method QueryOrder not implemented") +} +func (UnimplementedTradingServiceServer) QueryOrders(context.Context, *QueryOrdersRequest) (*QueryOrdersResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method QueryOrders not implemented") +} +func (UnimplementedTradingServiceServer) QueryTrades(context.Context, *QueryTradesRequest) (*QueryTradesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method QueryTrades not implemented") +} +func (UnimplementedTradingServiceServer) mustEmbedUnimplementedTradingServiceServer() {} + +// UnsafeTradingServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to TradingServiceServer will +// result in compilation errors. 
+type UnsafeTradingServiceServer interface { + mustEmbedUnimplementedTradingServiceServer() +} + +func RegisterTradingServiceServer(s grpc.ServiceRegistrar, srv TradingServiceServer) { + s.RegisterService(&TradingService_ServiceDesc, srv) +} + +func _TradingService_SubmitOrder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SubmitOrderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TradingServiceServer).SubmitOrder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/bbgo.TradingService/SubmitOrder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TradingServiceServer).SubmitOrder(ctx, req.(*SubmitOrderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TradingService_CancelOrder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelOrderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TradingServiceServer).CancelOrder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/bbgo.TradingService/CancelOrder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TradingServiceServer).CancelOrder(ctx, req.(*CancelOrderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TradingService_QueryOrder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryOrderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TradingServiceServer).QueryOrder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/bbgo.TradingService/QueryOrder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TradingServiceServer).QueryOrder(ctx, req.(*QueryOrderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TradingService_QueryOrders_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryOrdersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TradingServiceServer).QueryOrders(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/bbgo.TradingService/QueryOrders", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TradingServiceServer).QueryOrders(ctx, req.(*QueryOrdersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TradingService_QueryTrades_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryTradesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TradingServiceServer).QueryTrades(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/bbgo.TradingService/QueryTrades", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TradingServiceServer).QueryTrades(ctx, req.(*QueryTradesRequest)) + } + return interceptor(ctx, in, info, handler) +} + 
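For reference, here is a minimal client sketch showing how the generated stubs above might be used together. This is an illustrative example only, not part of the generated file or this patch; the server address "localhost:50051", the session name "max", and the symbol/price/quantity values are placeholder assumptions.

package main

import (
	"context"
	"io"
	"log"

	"google.golang.org/grpc"

	"github.com/c9s/bbgo/pkg/pb"
)

func main() {
	// Dial the bbgo gRPC server (plaintext for this sketch; the address is an assumption).
	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	ctx := context.Background()

	// Stream order book updates through MarketDataService.Subscribe.
	md := pb.NewMarketDataServiceClient(conn)
	stream, err := md.Subscribe(ctx, &pb.SubscribeRequest{
		Subscriptions: []*pb.Subscription{
			{Exchange: "max", Channel: pb.Channel_BOOK, Symbol: "BTCUSDT", Depth: "20"},
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	go func() {
		for {
			m, err := stream.Recv()
			if err == io.EOF {
				return
			}
			if err != nil {
				log.Println("recv:", err)
				return
			}
			log.Printf("market data: %s %s %s", m.Exchange, m.Symbol, m.Channel)
		}
	}()

	// Submit a limit order through TradingService.SubmitOrder.
	trading := pb.NewTradingServiceClient(conn)
	resp, err := trading.SubmitOrder(ctx, &pb.SubmitOrderRequest{
		Session: "max",
		SubmitOrders: []*pb.SubmitOrder{{
			Exchange:  "max",
			Symbol:    "BTCUSDT",
			Side:      pb.Side_BUY,
			OrderType: pb.OrderType_LIMIT,
			Price:     "20000",
			Quantity:  "0.001",
		}},
	})
	if err != nil {
		log.Fatal(err)
	}
	if resp.Error != nil {
		log.Printf("submit error: %s", resp.Error.ErrorMessage)
	} else {
		log.Printf("submitted, %d orders returned", len(resp.Orders))
	}
}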
+// TradingService_ServiceDesc is the grpc.ServiceDesc for TradingService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var TradingService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "bbgo.TradingService", + HandlerType: (*TradingServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SubmitOrder", + Handler: _TradingService_SubmitOrder_Handler, + }, + { + MethodName: "CancelOrder", + Handler: _TradingService_CancelOrder_Handler, + }, + { + MethodName: "QueryOrder", + Handler: _TradingService_QueryOrder_Handler, + }, + { + MethodName: "QueryOrders", + Handler: _TradingService_QueryOrders_Handler, + }, + { + MethodName: "QueryTrades", + Handler: _TradingService_QueryTrades_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "pkg/pb/bbgo.proto", +} diff --git a/pkg/server/asset_fs.go b/pkg/server/asset_fs.go new file mode 100644 index 0000000000..43b2eec68a --- /dev/null +++ b/pkg/server/asset_fs.go @@ -0,0 +1,22 @@ +//go:build web + +package server + +import ( + "net/http" + + "github.com/gin-gonic/gin" +) + +func (s *Server) assetsHandler(c *gin.Context) { + // redirect to .html page if the page exists + if pageRoutePattern.MatchString(c.Request.URL.Path) { + _, err := FS.Open(c.Request.URL.Path + ".html") + if err == nil { + c.Request.URL.Path += ".html" + } + } + + fs := http.FileServer(FS) + fs.ServeHTTP(c.Writer, c.Request) +} diff --git a/pkg/server/assets_dummy.go b/pkg/server/assets_dummy.go new file mode 100644 index 0000000000..4e50d2fd73 --- /dev/null +++ b/pkg/server/assets_dummy.go @@ -0,0 +1,9 @@ +//go:build !web + +package server + +import ( + "github.com/gin-gonic/gin" +) + +func (s *Server) assetsHandler(c *gin.Context) {} diff --git a/pkg/server/envvars.go b/pkg/server/envvars.go new file mode 100644 index 0000000000..f11a418f93 --- /dev/null +++ b/pkg/server/envvars.go @@ -0,0 +1,41 @@ +package server + +import ( + "fmt" + "strings" + + "github.com/c9s/bbgo/pkg/bbgo" +) + +func collectSessionEnvVars(sessions map[string]*bbgo.ExchangeSession) (envVars map[string]string, err error) { + envVars = make(map[string]string) + + for _, session := range sessions { + if len(session.Key) == 0 && len(session.Secret) == 0 { + err = fmt.Errorf("session %s key & secret is not empty", session.Name) + return + } + + if len(session.EnvVarPrefix) > 0 { + // pragma: allowlist nextline secret + envVars[session.EnvVarPrefix+"_API_KEY"] = session.Key + // pragma: allowlist nextline secret + envVars[session.EnvVarPrefix+"_API_SECRET"] = session.Secret + } else if len(session.Name) > 0 { + sn := strings.ToUpper(session.Name) + // pragma: allowlist nextline secret + envVars[sn+"_API_KEY"] = session.Key + // pragma: allowlist nextline secret + envVars[sn+"_API_SECRET"] = session.Secret + } else { + err = fmt.Errorf("session %s name or env var prefix is not defined", session.Name) + return + } + + // reset key and secret so that we won't marshal them to the config file + session.Key = "" + session.Secret = "" + } + + return +} diff --git a/pkg/server/ping.go b/pkg/server/ping.go new file mode 100644 index 0000000000..39de99b4df --- /dev/null +++ b/pkg/server/ping.go @@ -0,0 +1,45 @@ +package server + +import ( + "context" + "time" + + "github.com/sirupsen/logrus" +) + +func PingUntil(ctx context.Context, interval time.Duration, baseURL string, callback func()) { + pingURL := baseURL + "/api/ping" + timeout := time.NewTimer(3 * time.Minute) + + ticker := time.NewTicker(interval) + 
defer ticker.Stop() + + for { + select { + + case <-timeout.C: + logrus.Warnf("ping hits 1 minute timeout") + return + + case <-ctx.Done(): + return + + case <-ticker.C: + var response map[string]interface{} + var err = getJSON(pingURL, &response) + if err == nil { + callback() + return + } + } + } +} + +func pingAndOpenURL(ctx context.Context, baseURL string) { + setupURL := baseURL + "/setup" + go PingUntil(ctx, time.Second, baseURL, func() { + if err := openURL(setupURL); err != nil { + logrus.WithError(err).Errorf("can not call open command to open the web page") + } + }) +} diff --git a/pkg/server/routes.go b/pkg/server/routes.go new file mode 100644 index 0000000000..6502c17e1c --- /dev/null +++ b/pkg/server/routes.go @@ -0,0 +1,650 @@ +package server + +import ( + "context" + "fmt" + "io/ioutil" + "math/rand" + "net" + "net/http" + "os" + "regexp" + "strconv" + "time" + + "github.com/gin-contrib/cors" + "github.com/gin-gonic/gin" + "github.com/joho/godotenv" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/types" +) + +const DefaultBindAddress = "localhost:8080" + +type Setup struct { + // Context is the trader context + Context context.Context + + // Cancel is the trader context cancel function you want to cancel + Cancel context.CancelFunc + + // Token is used for setup api authentication + Token string + + BeforeRestart func() +} + +type Server struct { + Config *bbgo.Config + Environ *bbgo.Environment + Trader *bbgo.Trader + Setup *Setup + OpenInBrowser bool + + srv *http.Server +} + +func (s *Server) newEngine(ctx context.Context) *gin.Engine { + r := gin.Default() + r.Use(cors.New(cors.Config{ + AllowOrigins: []string{"*"}, + AllowHeaders: []string{"Origin", "Content-Type"}, + ExposeHeaders: []string{"Content-Length"}, + AllowMethods: []string{"GET", "POST", "PUT", "DELETE"}, + AllowWebSockets: true, + AllowCredentials: true, + MaxAge: 12 * time.Hour, + })) + + r.GET("/api/ping", s.ping) + + if s.Setup != nil { + r.POST("/api/setup/test-db", s.setupTestDB) + r.POST("/api/setup/configure-db", s.setupConfigureDB) + r.POST("/api/setup/strategy/single/:id/session/:session", s.setupAddStrategy) + r.POST("/api/setup/save", s.setupSaveConfig) + r.POST("/api/setup/restart", s.setupRestart) + } + + r.GET("/api/environment/syncing", func(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{ + "syncing": s.Environ.IsSyncing(), + }) + }) + + r.POST("/api/environment/sync", func(c *gin.Context) { + if s.Environ.IsSyncing() != bbgo.Syncing { + go func() { + // We use the root context here because the syncing operation is a background goroutine. + // It should not be terminated if the request is disconnected. 
+ if err := s.Environ.Sync(ctx); err != nil { + logrus.WithError(err).Error("sync error") + } + }() + } + + c.JSON(http.StatusOK, gin.H{ + "success": true, + }) + }) + + r.GET("/api/outbound-ip", func(c *gin.Context) { + outboundIP, err := GetOutboundIP() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{ + "error": err.Error(), + }) + } + + c.JSON(http.StatusOK, gin.H{ + "outboundIP": outboundIP.String(), + }) + }) + + r.GET("/api/trades", func(c *gin.Context) { + if s.Environ.TradeService == nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "database is not configured"}) + return + } + + exchange := c.Query("exchange") + symbol := c.Query("symbol") + gidStr := c.DefaultQuery("gid", "0") + lastGID, err := strconv.ParseInt(gidStr, 10, 64) + if err != nil { + logrus.WithError(err).Error("last gid parse error") + c.Status(http.StatusBadRequest) + return + } + + trades, err := s.Environ.TradeService.Query(service.QueryTradesOptions{ + Exchange: types.ExchangeName(exchange), + Symbol: symbol, + LastGID: lastGID, + Ordering: "DESC", + }) + if err != nil { + c.Status(http.StatusBadRequest) + logrus.WithError(err).Error("order query error") + return + } + + c.JSON(http.StatusOK, gin.H{ + "trades": trades, + }) + }) + + r.GET("/api/orders/closed", s.listClosedOrders) + r.GET("/api/trading-volume", s.tradingVolume) + + r.POST("/api/sessions/test", func(c *gin.Context) { + var session bbgo.ExchangeSession + if err := c.BindJSON(&session); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{ + "error": err.Error(), + }) + return + } + + err := session.InitExchange(session.ExchangeName.String(), nil) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{ + "error": err.Error(), + }) + return + } + + var anyErr error + _, openOrdersErr := session.Exchange.QueryOpenOrders(c, "BTCUSDT") + if openOrdersErr != nil { + anyErr = openOrdersErr + } + + _, balanceErr := session.Exchange.QueryAccountBalances(c) + if balanceErr != nil { + anyErr = balanceErr + } + + c.JSON(http.StatusOK, gin.H{ + "success": anyErr == nil, + "error": anyErr, + "balance": balanceErr == nil, + "openOrders": openOrdersErr == nil, + }) + }) + + r.GET("/api/sessions", func(c *gin.Context) { + var sessions []*bbgo.ExchangeSession + for _, session := range s.Environ.Sessions() { + sessions = append(sessions, session) + } + + if len(sessions) == 0 { + c.JSON(http.StatusOK, gin.H{"sessions": []int{}}) + } + + c.JSON(http.StatusOK, gin.H{"sessions": sessions}) + }) + + r.POST("/api/sessions", func(c *gin.Context) { + var session bbgo.ExchangeSession + if err := c.BindJSON(&session); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{ + "error": err.Error(), + }) + return + } + + if err := session.InitExchange(session.ExchangeName.String(), nil); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{ + "error": err.Error(), + }) + return + } + + if s.Config.Sessions == nil { + s.Config.Sessions = make(map[string]*bbgo.ExchangeSession) + } + s.Config.Sessions[session.Name] = &session + + s.Environ.AddExchangeSession(session.Name, &session) + + if err := session.Init(c, s.Environ); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{ + "error": err.Error(), + }) + return + } + + c.JSON(http.StatusOK, gin.H{"success": true}) + }) + + r.GET("/api/assets", s.listAssets) + r.GET("/api/sessions/:session", s.listSessions) + r.GET("/api/sessions/:session/trades", s.listSessionTrades) + r.GET("/api/sessions/:session/open-orders", s.listSessionOpenOrders) + 
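	// The session-scoped GET routes above and below expose a JSON API over the HTTP bind
	// address (DefaultBindAddress is localhost:8080): /api/sessions lists the configured
	// exchange sessions, /api/sessions/:session/trades returns the trades tracked by one
	// session, and /api/trades queries persisted trades by exchange, symbol and last gid.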
r.GET("/api/sessions/:session/account", s.getSessionAccount) + r.GET("/api/sessions/:session/account/balances", s.getSessionAccountBalance) + r.GET("/api/sessions/:session/symbols", s.listSessionSymbols) + + r.GET("/api/sessions/:session/pnl", func(c *gin.Context) { + c.JSON(200, gin.H{"message": "pong"}) + }) + + r.GET("/api/sessions/:session/market/:symbol/open-orders", func(c *gin.Context) { + c.JSON(200, gin.H{"message": "pong"}) + }) + + r.GET("/api/sessions/:session/market/:symbol/trades", func(c *gin.Context) { + c.JSON(200, gin.H{"message": "pong"}) + }) + + r.GET("/api/sessions/:session/market/:symbol/pnl", func(c *gin.Context) { + c.JSON(200, gin.H{"message": "pong"}) + }) + + r.GET("/api/strategies/single", s.listStrategies) + r.NoRoute(s.assetsHandler) + return r +} + +func (s *Server) RunWithListener(ctx context.Context, l net.Listener) error { + r := s.newEngine(ctx) + bind := l.Addr().String() + + if s.OpenInBrowser { + openBrowser(ctx, bind) + } + + s.srv = newServer(r, bind) + return serve(s.srv, l) +} + +func (s *Server) Run(ctx context.Context, bindArgs ...string) error { + r := s.newEngine(ctx) + bind := resolveBind(bindArgs) + if s.OpenInBrowser { + openBrowser(ctx, bind) + } + + s.srv = newServer(r, bind) + return listenAndServe(s.srv) +} + +func (s *Server) ping(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{"message": "pong"}) +} + +func (s *Server) listClosedOrders(c *gin.Context) { + if s.Environ.OrderService == nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "database is not configured"}) + return + } + + exchange := c.Query("exchange") + symbol := c.Query("symbol") + gidStr := c.DefaultQuery("gid", "0") + + lastGID, err := strconv.ParseInt(gidStr, 10, 64) + if err != nil { + logrus.WithError(err).Error("last gid parse error") + c.Status(http.StatusBadRequest) + return + } + + orders, err := s.Environ.OrderService.Query(service.QueryOrdersOptions{ + Exchange: types.ExchangeName(exchange), + Symbol: symbol, + LastGID: lastGID, + Ordering: "DESC", + }) + if err != nil { + c.Status(http.StatusBadRequest) + logrus.WithError(err).Error("order query error") + return + } + + c.JSON(http.StatusOK, gin.H{ + "orders": orders, + }) +} + +func (s *Server) listStrategies(c *gin.Context) { + var stashes []map[string]interface{} + + for _, mount := range s.Config.ExchangeStrategies { + stash, err := mount.Map() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + stash["strategy"] = mount.Strategy.ID() + + stashes = append(stashes, stash) + } + + if len(stashes) == 0 { + c.JSON(http.StatusOK, gin.H{"strategies": []int{}}) + } + c.JSON(http.StatusOK, gin.H{"strategies": stashes}) +} + +func (s *Server) listSessions(c *gin.Context) { + sessionName := c.Param("session") + session, ok := s.Environ.Session(sessionName) + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("session %s not found", sessionName)}) + return + } + + c.JSON(http.StatusOK, gin.H{"session": session}) +} + +func (s *Server) listSessionSymbols(c *gin.Context) { + sessionName := c.Param("session") + session, ok := s.Environ.Session(sessionName) + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("session %s not found", sessionName)}) + return + } + + var symbols []string + for symbol := range session.Markets() { + symbols = append(symbols, symbol) + } + + c.JSON(http.StatusOK, gin.H{"symbols": symbols}) +} + +func (s *Server) listSessionTrades(c *gin.Context) { + sessionName := c.Param("session") + session, ok 
:= s.Environ.Session(sessionName) + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("session %s not found", sessionName)}) + return + } + + c.JSON(http.StatusOK, gin.H{"trades": session.Trades}) +} + +func (s *Server) getSessionAccount(c *gin.Context) { + sessionName := c.Param("session") + session, ok := s.Environ.Session(sessionName) + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("session %s not found", sessionName)}) + return + } + + c.JSON(http.StatusOK, gin.H{"account": session.GetAccount()}) +} + +func (s *Server) getSessionAccountBalance(c *gin.Context) { + sessionName := c.Param("session") + session, ok := s.Environ.Session(sessionName) + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("session %s not found", sessionName)}) + return + } + + if session.Account == nil { + c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("the account of session %s is nil", sessionName)}) + return + } + + c.JSON(http.StatusOK, gin.H{"balances": session.GetAccount().Balances()}) +} + +func (s *Server) listSessionOpenOrders(c *gin.Context) { + sessionName := c.Param("session") + session, ok := s.Environ.Session(sessionName) + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("session %s not found", sessionName)}) + return + } + + marketOrders := make(map[string][]types.Order) + for symbol, orderStore := range session.OrderStores() { + marketOrders[symbol] = orderStore.Orders() + } + + c.JSON(http.StatusOK, gin.H{"orders": marketOrders}) +} + +func genFakeAssets() types.AssetMap { + + totalAssets := types.AssetMap{} + balances := types.BalanceMap{ + "BTC": types.Balance{Currency: "BTC", Available: fixedpoint.NewFromFloat(10.0 * rand.Float64())}, + "BCH": types.Balance{Currency: "BCH", Available: fixedpoint.NewFromFloat(0.01 * rand.Float64())}, + "LTC": types.Balance{Currency: "LTC", Available: fixedpoint.NewFromFloat(200.0 * rand.Float64())}, + "ETH": types.Balance{Currency: "ETH", Available: fixedpoint.NewFromFloat(50.0 * rand.Float64())}, + "SAND": types.Balance{Currency: "SAND", Available: fixedpoint.NewFromFloat(11500.0 * rand.Float64())}, + "BNB": types.Balance{Currency: "BNB", Available: fixedpoint.NewFromFloat(1000.0 * rand.Float64())}, + "GRT": types.Balance{Currency: "GRT", Available: fixedpoint.NewFromFloat(1000.0 * rand.Float64())}, + "MAX": types.Balance{Currency: "MAX", Available: fixedpoint.NewFromFloat(200000.0 * rand.Float64())}, + "COMP": types.Balance{Currency: "COMP", Available: fixedpoint.NewFromFloat(100.0 * rand.Float64())}, + } + assets := balances.Assets(map[string]fixedpoint.Value{ + "BTCUSDT": fixedpoint.NewFromFloat(38000.0), + "BCHUSDT": fixedpoint.NewFromFloat(478.0), + "LTCUSDT": fixedpoint.NewFromFloat(150.0), + "COMPUSDT": fixedpoint.NewFromFloat(450.0), + "ETHUSDT": fixedpoint.NewFromFloat(1700.0), + "BNBUSDT": fixedpoint.NewFromFloat(70.0), + "GRTUSDT": fixedpoint.NewFromFloat(0.89), + "DOTUSDT": fixedpoint.NewFromFloat(20.0), + "SANDUSDT": fixedpoint.NewFromFloat(0.13), + "MAXUSDT": fixedpoint.NewFromFloat(0.122), + }, time.Now()) + for currency, asset := range assets { + totalAssets[currency] = asset + } + + return totalAssets +} + +func (s *Server) listAssets(c *gin.Context) { + if ok, err := strconv.ParseBool(os.Getenv("USE_FAKE_ASSETS")); err == nil && ok { + c.JSON(http.StatusOK, gin.H{"assets": genFakeAssets()}) + return + } + + totalAssets := types.AssetMap{} + for _, session := range s.Environ.Sessions() { + balances := session.GetAccount().Balances() + + if err := 
session.UpdatePrices(c, balances.Currencies(), "USDT"); err != nil { + logrus.WithError(err).Error("price update failed") + c.Status(http.StatusInternalServerError) + return + } + + assets := balances.Assets(session.LastPrices(), time.Now()) + + for currency, asset := range assets { + totalAssets[currency] = asset + } + } + + c.JSON(http.StatusOK, gin.H{"assets": totalAssets}) +} + +func (s *Server) setupSaveConfig(c *gin.Context) { + if len(s.Config.Sessions) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "session is not configured"}) + return + } + + envVars, err := collectSessionEnvVars(s.Config.Sessions) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + if s.Environ.DatabaseService != nil { + envVars["DB_DRIVER"] = s.Environ.DatabaseService.Driver + envVars["DB_DSN"] = s.Environ.DatabaseService.DSN + } + + dotenvFile := ".env.local" + if err := moveFileToBackup(dotenvFile); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + if err := godotenv.Write(envVars, dotenvFile); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + out, err := s.Config.YAML() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + fmt.Println("config file") + fmt.Println("=================================================") + fmt.Println(string(out)) + fmt.Println("=================================================") + + filename := "bbgo.yaml" + if err := moveFileToBackup(filename); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + if err := ioutil.WriteFile(filename, out, 0666); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, gin.H{"success": true}) +} + +var pageRoutePattern = regexp.MustCompile("/[a-z]+$") + +func moveFileToBackup(filename string) error { + stat, err := os.Stat(filename) + + if err == nil && stat != nil { + err := os.Rename(filename, filename+"."+time.Now().Format("20060102_150405_07_00")) + if err != nil { + return err + } + } + + return nil +} + +func (s *Server) tradingVolume(c *gin.Context) { + if s.Environ.TradeService == nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "database is not configured"}) + return + } + + period := c.DefaultQuery("period", "day") + segment := c.DefaultQuery("segment", "exchange") + startTimeStr := c.Query("start-time") + + var startTime time.Time + + if startTimeStr != "" { + v, err := time.Parse(time.RFC3339, startTimeStr) + if err != nil { + c.Status(http.StatusBadRequest) + logrus.WithError(err).Error("start-time format incorrect") + return + } + startTime = v + + } else { + switch period { + case "day": + startTime = time.Now().AddDate(0, 0, -30) + + case "month": + startTime = time.Now().AddDate(0, -6, 0) + + case "year": + startTime = time.Now().AddDate(-2, 0, 0) + + default: + startTime = time.Now().AddDate(0, 0, -7) + + } + } + + rows, err := s.Environ.TradeService.QueryTradingVolume(startTime, service.TradingVolumeQueryOptions{ + SegmentBy: segment, + GroupByPeriod: period, + }) + if err != nil { + logrus.WithError(err).Error("trading volume query error") + c.Status(http.StatusInternalServerError) + return + } + + c.JSON(http.StatusOK, gin.H{"tradingVolumes": rows}) +} + +func newServer(r http.Handler, bind string) *http.Server { + return &http.Server{ + Addr: bind, + Handler: r, + } +} + +func serve(srv 
*http.Server, l net.Listener) (err error) { + defer func() { + if err != nil && err != http.ErrServerClosed { + logrus.WithError(err).Error("unexpected http server error") + } + }() + + err = srv.Serve(l) + if err != http.ErrServerClosed { + return err + } + + return nil +} + +func listenAndServe(srv *http.Server) error { + var err error + + defer func() { + if err != nil && err != http.ErrServerClosed { + logrus.WithError(err).Error("unexpected http server error") + } + }() + + err = srv.ListenAndServe() + if err != http.ErrServerClosed { + return err + } + + return nil +} + +func GetOutboundIP() (net.IP, error) { + conn, err := net.Dial("udp", "8.8.8.8:80") + if err != nil { + return nil, err + } + defer conn.Close() + + localAddr := conn.LocalAddr().(*net.UDPAddr) + return localAddr.IP, nil +} diff --git a/pkg/server/setup.go b/pkg/server/setup.go new file mode 100644 index 0000000000..cf1f14f40f --- /dev/null +++ b/pkg/server/setup.go @@ -0,0 +1,159 @@ +package server + +import ( + "context" + "net/http" + "os" + "syscall" + "time" + + "github.com/gin-gonic/gin" + "github.com/jmoiron/sqlx" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" +) + +func (s *Server) setupTestDB(c *gin.Context) { + payload := struct { + Driver string `json:"driver"` + DSN string `json:"dsn"` + }{} + + if err := c.BindJSON(&payload); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing arguments"}) + return + } + + if len(payload.Driver) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing driver parameter"}) + return + } + + if len(payload.DSN) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing dsn parameter"}) + return + } + + db, err := sqlx.Connect(payload.Driver, payload.DSN) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + if err := db.Close(); err != nil { + logrus.WithError(err).Error("db connection close error") + } + + c.JSON(http.StatusOK, gin.H{"success": true}) +} + +func (s *Server) setupConfigureDB(c *gin.Context) { + payload := struct { + Driver string `json:"driver"` + DSN string `json:"dsn"` + }{} + + if err := c.BindJSON(&payload); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing parameters"}) + return + } + + if len(payload.Driver) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing driver parameter"}) + return + } + + if len(payload.DSN) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing dsn parameter"}) + return + } + + if err := s.Environ.ConfigureDatabaseDriver(c, payload.Driver, payload.DSN); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusOK, gin.H{ + "success": true, + "driver": payload.Driver, + "dsn": payload.DSN, + }) +} + +func (s *Server) setupAddStrategy(c *gin.Context) { + sessionName := c.Param("session") + strategyID := c.Param("id") + + _, ok := s.Environ.Session(sessionName) + if !ok { + c.JSON(http.StatusNotFound, "session not found") + return + } + + var conf map[string]interface{} + + if err := c.BindJSON(&conf); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing arguments"}) + return + } + + strategy, err := bbgo.NewStrategyFromMap(strategyID, conf) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + mount := bbgo.ExchangeStrategyMount{ + Mounts: []string{sessionName}, + Strategy: strategy, + } + + s.Config.ExchangeStrategies = append(s.Config.ExchangeStrategies, mount) + + 
c.JSON(http.StatusOK, gin.H{"success": true}) +} + +func (s *Server) setupRestart(c *gin.Context) { + if s.srv == nil { + logrus.Error("nil srv") + return + } + + go func() { + logrus.Info("shutting down web server...") + + // The context is used to inform the server it has 5 seconds to finish + // the request it is currently handling + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := s.srv.Shutdown(ctx); err != nil { + logrus.WithError(err).Error("server forced to shutdown") + } + + logrus.Info("server shutdown completed") + + if s.Setup.BeforeRestart != nil { + s.Setup.BeforeRestart() + } + + bin := os.Args[0] + args := os.Args[0:] + + // filter out setup parameters + args = filterStrings(args, "--setup") + + envVars := os.Environ() + + logrus.Infof("exec %s %v", bin, args) + + if err := syscall.Exec(bin, args, envVars); err != nil { + logrus.WithError(err).Errorf("failed to restart %s", bin) + } + + s.Setup.Cancel() + }() + + c.JSON(http.StatusOK, gin.H{"success": true}) +} diff --git a/pkg/server/utils.go b/pkg/server/utils.go new file mode 100644 index 0000000000..96ffa9df53 --- /dev/null +++ b/pkg/server/utils.go @@ -0,0 +1,67 @@ +package server + +import ( + "context" + "encoding/json" + "net/http" + "os/exec" + "runtime" + "time" + + "github.com/sirupsen/logrus" +) + +func getJSON(url string, data interface{}) error { + var client = &http.Client{ + Timeout: 200 * time.Millisecond, + } + r, err := client.Get(url) + if err != nil { + return err + } + + defer r.Body.Close() + + return json.NewDecoder(r.Body).Decode(data) +} + +func openURL(url string) error { + cmd := exec.Command("open", url) + return cmd.Start() +} + +func filterStrings(slice []string, needle string) (ns []string) { + for _, str := range slice { + if str == needle { + continue + } + + ns = append(ns, str) + } + + return ns +} + +func openBrowser(ctx context.Context, bind string) { + if runtime.GOOS == "darwin" { + baseURL := "http://" + bind + go pingAndOpenURL(ctx, baseURL) + } else { + logrus.Warnf("%s is not supported for opening browser automatically", runtime.GOOS) + } +} + +func resolveBind(a []string) string { + switch len(a) { + case 0: + return DefaultBindAddress + + case 1: + return a[0] + + default: + panic("too many parameters for binding") + } + + return "" +} diff --git a/pkg/service/account.go b/pkg/service/account.go new file mode 100644 index 0000000000..d924932660 --- /dev/null +++ b/pkg/service/account.go @@ -0,0 +1,65 @@ +package service + +import ( + "github.com/c9s/bbgo/pkg/types" + "github.com/jmoiron/sqlx" + "go.uber.org/multierr" + "time" +) + +type AccountService struct { + DB *sqlx.DB +} + +func NewAccountService(db *sqlx.DB) *AccountService { + return &AccountService{DB: db} +} + +// TODO: should pass bbgo.ExchangeSession to this function, but that might cause cyclic import +func (s *AccountService) InsertAsset(time time.Time, session string, name types.ExchangeName, account string, isMargin bool, isIsolatedMargin bool, isolatedMarginSymbol string, assets types.AssetMap) error { + if s.DB == nil { + // skip db insert when no db connection setting. 
+ return nil + } + + var err error + for _, v := range assets { + _, _err := s.DB.Exec(` + INSERT INTO nav_history_details ( + session, + exchange, + subaccount, + time, + currency, + net_asset_in_usd, + net_asset_in_btc, + balance, + available, + locked, + borrowed, + net_asset, + price_in_usd, + is_margin, is_isolated, isolated_symbol) + values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);`, + session, + name, + account, + time, + v.Currency, + v.InUSD, + v.InBTC, + v.Total, + v.Available, + v.Locked, + v.Borrowed, + v.NetAsset, + v.PriceInUSD, + isMargin, + isIsolatedMargin, + isolatedMarginSymbol) + + err = multierr.Append(err, _err) // successful request + + } + return err +} diff --git a/pkg/service/account_test.go b/pkg/service/account_test.go new file mode 100644 index 0000000000..89c0fa98cf --- /dev/null +++ b/pkg/service/account_test.go @@ -0,0 +1,41 @@ +package service + +import ( + "testing" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func TestAccountService(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + xdb := sqlx.NewDb(db.DB, "sqlite3") + service := &AccountService{DB: xdb} + + t1 := time.Now() + err = service.InsertAsset(t1, "binance", types.ExchangeBinance, "main", false, false, "", types.AssetMap{ + "BTC": types.Asset{ + Currency: "BTC", + Total: fixedpoint.MustNewFromString("1.0"), + InUSD: fixedpoint.MustNewFromString("10.0"), + InBTC: fixedpoint.MustNewFromString("0.0001"), + Time: t1, + Locked: fixedpoint.MustNewFromString("0"), + Available: fixedpoint.MustNewFromString("1.0"), + Borrowed: fixedpoint.MustNewFromString("0"), + NetAsset: fixedpoint.MustNewFromString("1"), + PriceInUSD: fixedpoint.MustNewFromString("44870"), + }, + }) + assert.NoError(t, err) +} diff --git a/pkg/service/backtest.go b/pkg/service/backtest.go index c44d992984..256514fcf3 100644 --- a/pkg/service/backtest.go +++ b/pkg/service/backtest.go @@ -2,13 +2,18 @@ package service import ( "context" + "database/sql" + "fmt" + "strconv" "strings" "time" + sq "github.com/Masterminds/squirrel" "github.com/jmoiron/sqlx" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/c9s/bbgo/pkg/exchange/batch" "github.com/c9s/bbgo/pkg/types" ) @@ -16,64 +21,122 @@ type BacktestService struct { DB *sqlx.DB } -func (s *BacktestService) Sync(ctx context.Context, exchange types.Exchange, symbol string, startTime time.Time) error { - now := time.Now() - for interval := range types.SupportedIntervals { - log.Infof("synchronizing lastKLine for interval %s from exchange %s", interval, exchange.Name()) +func (s *BacktestService) SyncKLineByInterval(ctx context.Context, exchange types.Exchange, symbol string, interval types.Interval, startTime, endTime time.Time) error { + log.Infof("synchronizing %s klines with interval %s: %s <=> %s", exchange.Name(), interval, startTime, endTime) - lastKLine, err := s.QueryLast(exchange.Name(), symbol, interval) - if err != nil { + // TODO: use isFutures here + _, _, isIsolated, isolatedSymbol := getExchangeAttributes(exchange) + // override symbol if isolatedSymbol is not empty + if isIsolated && len(isolatedSymbol) > 0 { + symbol = isolatedSymbol + } + + if s.DB.DriverName() == "sqlite3" { + _, _ = s.DB.Exec("PRAGMA journal_mode = WAL") + _, _ = s.DB.Exec("PRAGMA synchronous = NORMAL") + } + + tasks := []SyncTask{ + { + Type: types.KLine{}, + Select: SelectLastKLines(exchange.Name(), symbol, interval, 
startTime, endTime, 100), + Time: func(obj interface{}) time.Time { + return obj.(types.KLine).StartTime.Time() + }, + ID: func(obj interface{}) string { + kline := obj.(types.KLine) + return strconv.FormatInt(kline.StartTime.UnixMilli(), 10) + // return kline.Symbol + kline.Interval.String() + strconv.FormatInt(kline.StartTime.UnixMilli(), 10) + }, + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + q := &batch.KLineBatchQuery{Exchange: exchange} + return q.Query(ctx, symbol, interval, startTime, endTime) + }, + BatchInsertBuffer: 1000, + BatchInsert: func(obj interface{}) error { + kLines := obj.([]types.KLine) + return s.BatchInsert(kLines) + }, + Insert: func(obj interface{}) error { + kline := obj.(types.KLine) + return s.Insert(kline) + }, + LogInsert: log.GetLevel() == log.DebugLevel, + }, + } + + for _, sel := range tasks { + if err := sel.execute(ctx, s.DB, startTime, endTime); err != nil { return err } + } - if lastKLine != nil { - log.Infof("found last checkpoint %s", lastKLine.EndTime) - startTime = lastKLine.StartTime.Add(time.Minute) - } + return nil +} - batch := &types.ExchangeBatchProcessor{Exchange: exchange} +func (s *BacktestService) Verify(sourceExchange types.Exchange, symbols []string, startTime time.Time, endTime time.Time) error { + var corruptCnt = 0 + for _, symbol := range symbols { + for interval := range types.SupportedIntervals { + log.Infof("verifying %s %s backtesting data: %s to %s...", symbol, interval, startTime, endTime) - // should use channel here - klineC, errC := batch.BatchQueryKLines(ctx, symbol, interval, startTime, now) - // var previousKLine types.KLine - for k := range klineC { - if err := s.Insert(k); err != nil { + timeRanges, err := s.FindMissingTimeRanges(context.Background(), sourceExchange, symbol, interval, + startTime, endTime) + if err != nil { return err } - } - if err := <-errC; err != nil { - return err + if len(timeRanges) == 0 { + continue + } + + log.Warnf("%s %s found missing time ranges:", symbol, interval) + corruptCnt += len(timeRanges) + for _, timeRange := range timeRanges { + log.Warnf("- %s", timeRange.String()) + } } } + log.Infof("backtest verification completed") + if corruptCnt > 0 { + log.Errorf("found %d corruptions", corruptCnt) + } else { + log.Infof("found %d corruptions", corruptCnt) + } + return nil } -// QueryLast queries the last order from the database -func (s *BacktestService) QueryLast(ex types.ExchangeName, symbol string, interval types.Interval) (*types.KLine, error) { +func (s *BacktestService) SyncFresh(ctx context.Context, exchange types.Exchange, symbol string, interval types.Interval, startTime, endTime time.Time) error { + log.Infof("starting fresh sync %s %s %s: %s <=> %s", exchange.Name(), symbol, interval, startTime, endTime) + startTime = startTime.Truncate(time.Minute).Add(-2 * time.Second) + endTime = endTime.Truncate(time.Minute).Add(2 * time.Second) + return s.SyncKLineByInterval(ctx, exchange, symbol, interval, startTime, endTime) +} + +// QueryKLine queries the klines from the database +func (s *BacktestService) QueryKLine(ex types.ExchangeName, symbol string, interval types.Interval, orderBy string, limit int) (*types.KLine, error) { log.Infof("querying last kline exchange = %s AND symbol = %s AND interval = %s", ex, symbol, interval) + tableName := targetKlineTable(ex) // make the SQL syntax IDE friendly, so that it can analyze it. 
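+	// the kline table name (e.g. binance_klines) is resolved from the exchange name via targetKlineTable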
- sql := "SELECT * FROM binance_klines WHERE `symbol` = :symbol AND `interval` = :interval ORDER BY end_time DESC LIMIT 1" - sql = strings.ReplaceAll(sql, "binance_klines", ex.String()+"_klines") + sql := fmt.Sprintf("SELECT * FROM `%s` WHERE `symbol` = :symbol AND `interval` = :interval ORDER BY end_time "+orderBy+" LIMIT "+strconv.Itoa(limit), tableName) rows, err := s.DB.NamedQuery(sql, map[string]interface{}{ - "exchange": ex, "interval": interval, "symbol": symbol, }) + defer rows.Close() if err != nil { - return nil, errors.Wrap(err, "query last order error") + return nil, errors.Wrap(err, "query kline error") } if rows.Err() != nil { return nil, rows.Err() } - defer rows.Close() - if rows.Next() { var kline types.KLine err = rows.StructScan(&kline) @@ -83,14 +146,18 @@ func (s *BacktestService) QueryLast(ex types.ExchangeName, symbol string, interv return nil, rows.Err() } -func (s *BacktestService) QueryKLinesForward(exchange types.ExchangeName, symbol string, interval types.Interval, startTime time.Time) ([]types.KLine, error) { - sql := "SELECT * FROM `binance_klines` WHERE `end_time` >= :startTime AND `symbol` = :symbol AND `interval` = :interval ORDER BY end_time ASC" - sql = strings.ReplaceAll(sql, "binance_klines", exchange.String()+"_klines") +// QueryKLinesForward is used for querying klines to back-testing +func (s *BacktestService) QueryKLinesForward(exchange types.ExchangeName, symbol string, interval types.Interval, startTime time.Time, limit int) ([]types.KLine, error) { + tableName := targetKlineTable(exchange) + sql := "SELECT * FROM `binance_klines` WHERE `end_time` >= :start_time AND `symbol` = :symbol AND `interval` = :interval and exchange = :exchange ORDER BY end_time ASC LIMIT :limit" + sql = strings.ReplaceAll(sql, "binance_klines", tableName) rows, err := s.DB.NamedQuery(sql, map[string]interface{}{ - "startTime": startTime, - "symbol": symbol, - "interval": interval, + "start_time": startTime, + "limit": limit, + "symbol": symbol, + "interval": interval, + "exchange": exchange.String(), }) if err != nil { return nil, err @@ -99,14 +166,19 @@ func (s *BacktestService) QueryKLinesForward(exchange types.ExchangeName, symbol return s.scanRows(rows) } -func (s *BacktestService) QueryKLinesBackward(exchange types.ExchangeName, symbol string, interval types.Interval, endTime time.Time) ([]types.KLine, error) { - sql := "SELECT * FROM `binance_klines` WHERE `end_time` <= :endTime AND `symbol` = :symbol AND `interval` = :interval ORDER BY end_time ASC" - sql = strings.ReplaceAll(sql, "binance_klines", exchange.String()+"_klines") +func (s *BacktestService) QueryKLinesBackward(exchange types.ExchangeName, symbol string, interval types.Interval, endTime time.Time, limit int) ([]types.KLine, error) { + tableName := targetKlineTable(exchange) + + sql := "SELECT * FROM `binance_klines` WHERE `end_time` <= :end_time and exchange = :exchange AND `symbol` = :symbol AND `interval` = :interval ORDER BY end_time DESC LIMIT :limit" + sql = strings.ReplaceAll(sql, "binance_klines", tableName) + sql = "SELECT t.* FROM (" + sql + ") AS t ORDER BY t.end_time ASC" rows, err := s.DB.NamedQuery(sql, map[string]interface{}{ - "endTime": endTime, + "limit": limit, + "end_time": endTime, "symbol": symbol, "interval": interval, + "exchange": exchange.String(), }) if err != nil { return nil, err @@ -116,36 +188,57 @@ func (s *BacktestService) QueryKLinesBackward(exchange types.ExchangeName, symbo } func (s *BacktestService) QueryKLinesCh(since, until time.Time, exchange types.Exchange, 
symbols []string, intervals []types.Interval) (chan types.KLine, chan error) { - sql := "SELECT * FROM `binance_klines` WHERE `end_time` BETWEEN :since AND :until AND `symbol` IN (:symbols) AND `interval` IN (:intervals) ORDER BY end_time ASC" - sql = strings.ReplaceAll(sql, "binance_klines", exchange.Name().String()+"_klines") + if len(symbols) == 0 { + return returnError(errors.Errorf("symbols is empty when querying kline, plesae check your strategy setting. ")) + } + + tableName := targetKlineTable(exchange.Name()) + var query string - sql, args, err := sqlx.Named(sql, map[string]interface{}{ + if len(symbols) == 1 { + query = "SELECT * FROM `binance_klines` WHERE `end_time` BETWEEN :since AND :until AND `symbol` = :symbols AND `interval` IN (:intervals) ORDER BY end_time ASC" + } else { + query = "SELECT * FROM `binance_klines` WHERE `end_time` BETWEEN :since AND :until AND `symbol` IN (:symbols) AND `interval` IN (:intervals) ORDER BY end_time ASC" + } + + query = strings.ReplaceAll(query, "binance_klines", tableName) + + sql, args, err := sqlx.Named(query, map[string]interface{}{ "since": since, "until": until, + "symbol": symbols[0], "symbols": symbols, "intervals": types.IntervalSlice(intervals), }) sql, args, err = sqlx.In(sql, args...) + if err != nil { + return returnError(err) + } sql = s.DB.Rebind(sql) rows, err := s.DB.Queryx(sql, args...) if err != nil { - log.WithError(err).Error("query error") - - errC := make(chan error, 1) - - // avoid blocking - go func() { - errC <- err - close(errC) - }() - return nil, errC + return returnError(err) } return s.scanRowsCh(rows) } +func returnError(err error) (chan types.KLine, chan error) { + ch := make(chan types.KLine) + close(ch) + log.WithError(err).Error("backtest query error") + + errC := make(chan error, 1) + // avoid blocking + go func() { + errC <- err + close(errC) + }() + return ch, errC +} + // scanRowsCh scan rows into channel func (s *BacktestService) scanRowsCh(rows *sqlx.Rows) (chan types.KLine, chan error) { ch := make(chan types.KLine, 500) @@ -177,6 +270,7 @@ func (s *BacktestService) scanRowsCh(rows *sqlx.Rows) (chan types.KLine, chan er } func (s *BacktestService) scanRows(rows *sqlx.Rows) (klines []types.KLine, err error) { + defer rows.Close() for rows.Next() { var kline types.KLine if err := rows.StructScan(&kline); err != nil { @@ -189,15 +283,248 @@ func (s *BacktestService) scanRows(rows *sqlx.Rows) (klines []types.KLine, err e return klines, rows.Err() } +func targetKlineTable(exchangeName types.ExchangeName) string { + return strings.ToLower(exchangeName.String()) + "_klines" +} + +var errExchangeFieldIsUnset = errors.New("kline.Exchange field should not be empty") + func (s *BacktestService) Insert(kline types.KLine) error { if len(kline.Exchange) == 0 { - return errors.New("kline.Exchange field should not be empty") + return errExchangeFieldIsUnset } - sql := "INSERT INTO `binance_klines` (`exchange`, `start_time`, `end_time`, `symbol`, `interval`, `open`, `high`, `low`, `close`, `closed`, `volume`)" + - "VALUES (:exchange, :start_time, :end_time, :symbol, :interval, :open, :high, :low, :close, :closed, :volume)" - sql = strings.ReplaceAll(sql, "binance_klines", kline.Exchange+"_klines") + tableName := targetKlineTable(kline.Exchange) + + sql := fmt.Sprintf("INSERT INTO `%s` (`exchange`, `start_time`, `end_time`, `symbol`, `interval`, `open`, `high`, `low`, `close`, `closed`, `volume`, `quote_volume`, `taker_buy_base_volume`, `taker_buy_quote_volume`)"+ + "VALUES (:exchange, :start_time, :end_time, 
:symbol, :interval, :open, :high, :low, :close, :closed, :volume, :quote_volume, :taker_buy_base_volume, :taker_buy_quote_volume)", tableName) _, err := s.DB.NamedExec(sql, kline) return err } + +// BatchInsert Note: all kline should be same exchange, or it will cause issue. +func (s *BacktestService) BatchInsert(kline []types.KLine) error { + if len(kline) == 0 { + return nil + } + + tableName := targetKlineTable(kline[0].Exchange) + + sql := fmt.Sprintf("INSERT INTO `%s` (`exchange`, `start_time`, `end_time`, `symbol`, `interval`, `open`, `high`, `low`, `close`, `closed`, `volume`, `quote_volume`, `taker_buy_base_volume`, `taker_buy_quote_volume`)"+ + " VALUES (:exchange, :start_time, :end_time, :symbol, :interval, :open, :high, :low, :close, :closed, :volume, :quote_volume, :taker_buy_base_volume, :taker_buy_quote_volume); ", tableName) + + tx := s.DB.MustBegin() + if _, err := tx.NamedExec(sql, kline); err != nil { + if e := tx.Rollback(); e != nil { + log.WithError(e).Fatalf("cannot rollback insertion %v", err) + } + return err + } + return tx.Commit() +} + +type TimeRange struct { + Start time.Time + End time.Time +} + +func (t *TimeRange) String() string { + return t.Start.String() + " ~ " + t.End.String() +} + +func (s *BacktestService) Sync(ctx context.Context, ex types.Exchange, symbol string, interval types.Interval, since, until time.Time) error { + t1, t2, err := s.QueryExistingDataRange(ctx, ex, symbol, interval, since, until) + if err != nil && err != sql.ErrNoRows { + return err + } + + if err == sql.ErrNoRows || t1 == nil || t2 == nil { + // fallback to fresh sync + return s.SyncFresh(ctx, ex, symbol, interval, since, until) + } + + return s.SyncPartial(ctx, ex, symbol, interval, since, until) +} + +// SyncPartial +// find the existing data time range (t1, t2) +// scan if there is a missing part +// create a time range slice []TimeRange +// iterate the []TimeRange slice to sync data. +func (s *BacktestService) SyncPartial(ctx context.Context, ex types.Exchange, symbol string, interval types.Interval, since, until time.Time) error { + log.Infof("starting partial sync %s %s %s: %s <=> %s", ex.Name(), symbol, interval, since, until) + + t1, t2, err := s.QueryExistingDataRange(ctx, ex, symbol, interval, since, until) + if err != nil && err != sql.ErrNoRows { + return err + } + + if err == sql.ErrNoRows || t1 == nil || t2 == nil { + // fallback to fresh sync + return s.SyncFresh(ctx, ex, symbol, interval, since, until) + } + + timeRanges, err := s.FindMissingTimeRanges(ctx, ex, symbol, interval, t1.Time(), t2.Time()) + if err != nil { + return err + } + + if len(timeRanges) > 0 { + log.Infof("found missing data time ranges: %v", timeRanges) + } + + // there are few cases: + // t1 == since && t2 == until + // [since] ------- [t1] data [t2] ------ [until] + if since.Before(t1.Time()) && t1.Time().Sub(since) > interval.Duration() { + // shift slice + timeRanges = append([]TimeRange{ + {Start: since.Add(-2 * time.Second), End: t1.Time()}, // we should include since + }, timeRanges...) 
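+		// the 2-second padding matches SyncFresh so that the boundary kline at `since` is included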
+ } + + if t2.Time().Before(until) && until.Sub(t2.Time()) > interval.Duration() { + timeRanges = append(timeRanges, TimeRange{ + Start: t2.Time(), + End: until.Add(-interval.Duration()), // include until + }) + } + + for _, timeRange := range timeRanges { + err = s.SyncKLineByInterval(ctx, ex, symbol, interval, timeRange.Start.Add(time.Second), timeRange.End.Add(-time.Second)) + if err != nil { + return err + } + } + + return nil +} + +// FindMissingTimeRanges returns the missing time ranges, the start/end time represents the existing data time points. +// So when sending kline query to the exchange API, we need to add one second to the start time and minus one second to the end time. +func (s *BacktestService) FindMissingTimeRanges(ctx context.Context, ex types.Exchange, symbol string, interval types.Interval, since, until time.Time) ([]TimeRange, error) { + query := SelectKLineTimePoints(ex.Name(), symbol, interval, since, until) + sql, args, err := query.ToSql() + if err != nil { + return nil, err + } + + rows, err := s.DB.QueryContext(ctx, sql, args...) + defer rows.Close() + if err != nil { + return nil, err + } + + var timeRanges []TimeRange + var lastTime = since + var intervalDuration = interval.Duration() + for rows.Next() { + var tt types.Time + if err := rows.Scan(&tt); err != nil { + return nil, err + } + + var t = time.Time(tt) + if t.Sub(lastTime) > intervalDuration { + timeRanges = append(timeRanges, TimeRange{ + Start: lastTime, + End: t, + }) + } + + lastTime = t + } + + if lastTime.Before(until) && until.Sub(lastTime) > intervalDuration { + timeRanges = append(timeRanges, TimeRange{ + Start: lastTime, + End: until, + }) + } + + return timeRanges, nil +} + +func (s *BacktestService) QueryExistingDataRange(ctx context.Context, ex types.Exchange, symbol string, interval types.Interval, tArgs ...time.Time) (start, end *types.Time, err error) { + sel := SelectKLineTimeRange(ex.Name(), symbol, interval, tArgs...) + sql, args, err := sel.ToSql() + if err != nil { + return nil, nil, err + } + + var t1, t2 types.Time + + row := s.DB.QueryRowContext(ctx, sql, args...) + + if err := row.Scan(&t1, &t2); err != nil { + return nil, nil, err + } + + if err := row.Err(); err != nil { + return nil, nil, err + } + + if t1 == (types.Time{}) || t2 == (types.Time{}) { + return nil, nil, nil + } + + return &t1, &t2, nil +} + +func SelectKLineTimePoints(ex types.ExchangeName, symbol string, interval types.Interval, args ...time.Time) sq.SelectBuilder { + conditions := sq.And{ + sq.Eq{"symbol": symbol}, + sq.Eq{"`interval`": interval.String()}, + } + + if len(args) == 2 { + since := args[0] + until := args[1] + conditions = append(conditions, sq.Expr("`start_time` BETWEEN ? AND ?", since, until)) + } + + tableName := targetKlineTable(ex) + + return sq.Select("start_time"). + From(tableName). + Where(conditions). + OrderBy("start_time ASC") +} + +// SelectKLineTimeRange returns the existing klines time range (since < kline.start_time < until) +func SelectKLineTimeRange(ex types.ExchangeName, symbol string, interval types.Interval, args ...time.Time) sq.SelectBuilder { + conditions := sq.And{ + sq.Eq{"symbol": symbol}, + sq.Eq{"`interval`": interval.String()}, + } + + if len(args) == 2 { + // NOTE + // sqlite does not support timezone format, so we are converting to local timezone + // mysql works in this case, so this is a workaround + since := args[0] + until := args[1] + conditions = append(conditions, sq.Expr("`start_time` BETWEEN ? 
AND ?", since, until)) + } + + tableName := targetKlineTable(ex) + + return sq.Select("MIN(start_time) AS t1, MAX(start_time) AS t2"). + From(tableName). + Where(conditions) +} + +// TODO: add is_futures column since the klines data is different +func SelectLastKLines(ex types.ExchangeName, symbol string, interval types.Interval, startTime, endTime time.Time, limit uint64) sq.SelectBuilder { + tableName := targetKlineTable(ex) + return sq.Select("*"). + From(tableName). + Where(sq.And{ + sq.Eq{"symbol": symbol}, + sq.Eq{"`interval`": interval.String()}, + sq.Expr("start_time BETWEEN ? AND ?", startTime, endTime), + }). + OrderBy("start_time DESC"). + Limit(limit) +} diff --git a/pkg/service/backtest_test.go b/pkg/service/backtest_test.go new file mode 100644 index 0000000000..ca863e2871 --- /dev/null +++ b/pkg/service/backtest_test.go @@ -0,0 +1,169 @@ +package service + +import ( + "context" + "database/sql" + "testing" + "time" + + "github.com/jmoiron/sqlx" + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/exchange" + "github.com/c9s/bbgo/pkg/types" +) + +func TestBacktestService_FindMissingTimeRanges_EmptyData(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + ctx := context.Background() + dbx := sqlx.NewDb(db.DB, "sqlite3") + + ex, err := exchange.NewPublic(types.ExchangeBinance) + assert.NoError(t, err) + + service := &BacktestService{DB: dbx} + + symbol := "BTCUSDT" + now := time.Now() + startTime1 := now.AddDate(0, 0, -7).Truncate(time.Hour) + endTime1 := now.AddDate(0, 0, -6).Truncate(time.Hour) + timeRanges, err := service.FindMissingTimeRanges(ctx, ex, symbol, types.Interval1h, startTime1, endTime1) + assert.NoError(t, err) + assert.NotEmpty(t, timeRanges) +} + +func TestBacktestService_QueryExistingDataRange(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + ctx := context.Background() + dbx := sqlx.NewDb(db.DB, "sqlite3") + + ex, err := exchange.NewPublic(types.ExchangeBinance) + assert.NoError(t, err) + + service := &BacktestService{DB: dbx} + + symbol := "BTCUSDT" + now := time.Now() + startTime1 := now.AddDate(0, 0, -7).Truncate(time.Hour) + endTime1 := now.AddDate(0, 0, -6).Truncate(time.Hour) + // empty range + t1, t2, err := service.QueryExistingDataRange(ctx, ex, symbol, types.Interval1h, startTime1, endTime1) + assert.Error(t, sql.ErrNoRows, err) + assert.Nil(t, t1) + assert.Nil(t, t2) +} + +func TestBacktestService_SyncPartial(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + ctx := context.Background() + dbx := sqlx.NewDb(db.DB, "sqlite3") + + ex, err := exchange.NewPublic(types.ExchangeBinance) + assert.NoError(t, err) + + service := &BacktestService{DB: dbx} + + symbol := "BTCUSDT" + now := time.Now() + startTime1 := now.AddDate(0, 0, -7).Truncate(time.Hour) + endTime1 := now.AddDate(0, 0, -6).Truncate(time.Hour) + + startTime2 := now.AddDate(0, 0, -5).Truncate(time.Hour) + endTime2 := now.AddDate(0, 0, -4).Truncate(time.Hour) + + // kline query is exclusive + err = service.SyncKLineByInterval(ctx, ex, symbol, types.Interval1h, startTime1.Add(-time.Second), endTime1.Add(time.Second)) + assert.NoError(t, err) + + err = service.SyncKLineByInterval(ctx, ex, symbol, types.Interval1h, startTime2.Add(-time.Second), endTime2.Add(time.Second)) + assert.NoError(t, err) + + timeRanges, err := service.FindMissingTimeRanges(ctx, ex, symbol, types.Interval1h, 
startTime1, endTime2) + assert.NoError(t, err) + assert.NotEmpty(t, timeRanges) + assert.Len(t, timeRanges, 1) + + t.Run("fill missing time ranges", func(t *testing.T) { + err = service.SyncPartial(ctx, ex, symbol, types.Interval1h, startTime1, endTime2) + assert.NoError(t, err, "sync partial should not return error") + + timeRanges2, err := service.FindMissingTimeRanges(ctx, ex, symbol, types.Interval1h, startTime1, endTime2) + assert.NoError(t, err) + assert.Empty(t, timeRanges2) + }) +} + +func TestBacktestService_FindMissingTimeRanges(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + ctx := context.Background() + dbx := sqlx.NewDb(db.DB, "sqlite3") + + ex, err := exchange.NewPublic(types.ExchangeBinance) + assert.NoError(t, err) + + service := &BacktestService{DB: dbx} + + symbol := "BTCUSDT" + now := time.Now() + startTime1 := now.AddDate(0, 0, -6).Truncate(time.Hour) + endTime1 := now.AddDate(0, 0, -5).Truncate(time.Hour) + + startTime2 := now.AddDate(0, 0, -4).Truncate(time.Hour) + endTime2 := now.AddDate(0, 0, -3).Truncate(time.Hour) + + // kline query is exclusive + err = service.SyncKLineByInterval(ctx, ex, symbol, types.Interval1h, startTime1.Add(-time.Second), endTime1.Add(time.Second)) + assert.NoError(t, err) + + err = service.SyncKLineByInterval(ctx, ex, symbol, types.Interval1h, startTime2.Add(-time.Second), endTime2.Add(time.Second)) + assert.NoError(t, err) + + t1, t2, err := service.QueryExistingDataRange(ctx, ex, symbol, types.Interval1h) + if assert.NoError(t, err) { + assert.Equal(t, startTime1, t1.Time(), "start time point should match") + assert.Equal(t, endTime2, t2.Time(), "end time point should match") + } + + timeRanges, err := service.FindMissingTimeRanges(ctx, ex, symbol, types.Interval1h, startTime1, endTime2) + if assert.NoError(t, err) { + assert.NotEmpty(t, timeRanges) + assert.Len(t, timeRanges, 1, "should find one missing time range") + t.Logf("found timeRanges: %+v", timeRanges) + + log.SetLevel(log.DebugLevel) + + for _, timeRange := range timeRanges { + err = service.SyncKLineByInterval(ctx, ex, symbol, types.Interval1h, timeRange.Start.Add(time.Second), timeRange.End.Add(-time.Second)) + assert.NoError(t, err) + } + + timeRanges, err = service.FindMissingTimeRanges(ctx, ex, symbol, types.Interval1h, startTime1, endTime2) + assert.NoError(t, err) + assert.Empty(t, timeRanges, "after partial sync, missing time ranges should be back-filled") + } +} diff --git a/pkg/service/database.go b/pkg/service/database.go new file mode 100644 index 0000000000..3719b82311 --- /dev/null +++ b/pkg/service/database.go @@ -0,0 +1,97 @@ +package service + +import ( + "context" + + "github.com/c9s/rockhopper" + "github.com/go-sql-driver/mysql" + "github.com/jmoiron/sqlx" + + mysqlMigrations "github.com/c9s/bbgo/pkg/migrations/mysql" + sqlite3Migrations "github.com/c9s/bbgo/pkg/migrations/sqlite3" +) + +// reflect cache for database +var dbCache = NewReflectCache() + +type DatabaseService struct { + Driver string + DSN string + DB *sqlx.DB +} + +func NewDatabaseService(driver, dsn string) *DatabaseService { + if driver == "mysql" { + var err error + dsn, err = ReformatMysqlDSN(dsn) + if err != nil { + // incorrect mysql dsn is logical exception + panic(err) + } + } + + return &DatabaseService{ + Driver: driver, + DSN: dsn, + } + +} + +func (s *DatabaseService) Connect() error { + var err error + s.DB, err = sqlx.Connect(s.Driver, s.DSN) + return err +} + +func (s *DatabaseService) Insert(record interface{}) error { + 
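+	// the INSERT statement is generated from the record's `db` struct tags via the reflect cache, then executed as a named query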
sql := dbCache.InsertSqlOf(record) + _, err := s.DB.NamedExec(sql, record) + return err +} + +func (s *DatabaseService) Close() error { + return s.DB.Close() +} + +func (s *DatabaseService) Upgrade(ctx context.Context) error { + dialect, err := rockhopper.LoadDialect(s.Driver) + if err != nil { + return err + } + + var migrations rockhopper.MigrationSlice + + switch s.Driver { + case "sqlite3": + migrations = sqlite3Migrations.Migrations() + case "mysql": + migrations = mysqlMigrations.Migrations() + + } + + // sqlx.DB is different from sql.DB + rh := rockhopper.New(s.Driver, dialect, s.DB.DB) + + currentVersion, err := rh.CurrentVersion() + if err != nil { + return err + } + + if err := rockhopper.Up(ctx, rh, migrations, currentVersion, 0); err != nil { + return err + } + + return nil +} + +func ReformatMysqlDSN(dsn string) (string, error) { + config, err := mysql.ParseDSN(dsn) + if err != nil { + return "", err + } + + // we need timestamp and datetime fields to be parsed into time.Time struct + config.ParseTime = true + dsn = config.FormatDSN() + return dsn, nil +} diff --git a/pkg/service/db_test.go b/pkg/service/db_test.go new file mode 100644 index 0000000000..3093f7dd1b --- /dev/null +++ b/pkg/service/db_test.go @@ -0,0 +1,44 @@ +package service + +import ( + "context" + "testing" + + "github.com/c9s/rockhopper" + "github.com/stretchr/testify/assert" +) + +func prepareDB(t *testing.T) (*rockhopper.DB, error) { + dialect, err := rockhopper.LoadDialect("sqlite3") + if !assert.NoError(t, err) { + return nil, err + } + + assert.NotNil(t, dialect) + + db, err := rockhopper.Open("sqlite3", dialect, ":memory:") + if !assert.NoError(t, err) { + return nil, err + } + + assert.NotNil(t, db) + + _, err = db.CurrentVersion() + if !assert.NoError(t, err) { + return nil, err + } + + var loader rockhopper.SqlMigrationLoader + migrations, err := loader.Load("../../migrations/sqlite3") + if !assert.NoError(t, err) { + return nil, err + } + + assert.NotEmpty(t, migrations) + + ctx := context.Background() + err = rockhopper.Up(ctx, db, migrations, 0, 0) + assert.NoError(t, err, "should migrate successfully") + + return db, err +} diff --git a/pkg/service/deposit.go b/pkg/service/deposit.go new file mode 100644 index 0000000000..b892cdb810 --- /dev/null +++ b/pkg/service/deposit.go @@ -0,0 +1,101 @@ +package service + +import ( + "context" + "time" + + sq "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + + "github.com/c9s/bbgo/pkg/exchange/batch" + "github.com/c9s/bbgo/pkg/types" +) + +type DepositService struct { + DB *sqlx.DB +} + +// Sync syncs the withdraw records into db +func (s *DepositService) Sync(ctx context.Context, ex types.Exchange, startTime time.Time) error { + isMargin, isFutures, isIsolated, _ := getExchangeAttributes(ex) + if isMargin || isFutures || isIsolated { + // only works in spot + return nil + } + + transferApi, ok := ex.(types.ExchangeTransferService) + if !ok { + return nil + } + + tasks := []SyncTask{ + { + Type: types.Deposit{}, + Select: SelectLastDeposits(ex.Name(), 100), + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.DepositBatchQuery{ + ExchangeTransferService: transferApi, + } + return query.Query(ctx, "", startTime, endTime) + }, + Time: func(obj interface{}) time.Time { + return obj.(types.Deposit).Time.Time() + }, + ID: func(obj interface{}) string { + deposit := obj.(types.Deposit) + return deposit.TransactionID + }, + Filter: func(obj interface{}) bool { + deposit := 
obj.(types.Deposit) + return len(deposit.TransactionID) != 0 + }, + LogInsert: true, + }, + } + + for _, sel := range tasks { + if err := sel.execute(ctx, s.DB, startTime); err != nil { + return err + } + } + + return nil +} + +func (s *DepositService) Query(exchangeName types.ExchangeName) ([]types.Deposit, error) { + args := map[string]interface{}{ + "exchange": exchangeName, + } + sql := "SELECT * FROM `deposits` WHERE `exchange` = :exchange ORDER BY `time` ASC" + rows, err := s.DB.NamedQuery(sql, args) + if err != nil { + return nil, err + } + + defer rows.Close() + + return s.scanRows(rows) +} + +func (s *DepositService) scanRows(rows *sqlx.Rows) (deposits []types.Deposit, err error) { + for rows.Next() { + var deposit types.Deposit + if err := rows.StructScan(&deposit); err != nil { + return deposits, err + } + + deposits = append(deposits, deposit) + } + + return deposits, rows.Err() +} + +func SelectLastDeposits(ex types.ExchangeName, limit uint64) sq.SelectBuilder { + return sq.Select("*"). + From("deposits"). + Where(sq.And{ + sq.Eq{"exchange": ex}, + }). + OrderBy("time DESC"). + Limit(limit) +} diff --git a/pkg/service/deposit_test.go b/pkg/service/deposit_test.go new file mode 100644 index 0000000000..6d43c3366c --- /dev/null +++ b/pkg/service/deposit_test.go @@ -0,0 +1 @@ +package service diff --git a/pkg/service/errors.go b/pkg/service/errors.go new file mode 100644 index 0000000000..516301d5b1 --- /dev/null +++ b/pkg/service/errors.go @@ -0,0 +1,5 @@ +package service + +import "github.com/pkg/errors" + +var ErrPersistenceNotExists = errors.New("persistent data does not exists") diff --git a/pkg/service/margin.go b/pkg/service/margin.go new file mode 100644 index 0000000000..1794712f4e --- /dev/null +++ b/pkg/service/margin.go @@ -0,0 +1,147 @@ +package service + +import ( + "context" + "strconv" + "time" + + sq "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + + "github.com/c9s/bbgo/pkg/exchange/batch" + "github.com/c9s/bbgo/pkg/types" +) + +type MarginService struct { + DB *sqlx.DB +} + +func (s *MarginService) Sync(ctx context.Context, ex types.Exchange, asset string, startTime time.Time) error { + api, ok := ex.(types.MarginHistory) + if !ok { + return nil + } + + marginExchange, ok := ex.(types.MarginExchange) + if !ok { + return nil + } + + marginSettings := marginExchange.GetMarginSettings() + if !marginSettings.IsMargin { + return nil + } + + tasks := []SyncTask{ + { + Select: SelectLastMarginLoans(ex.Name(), 100), + Type: types.MarginLoan{}, + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.MarginLoanBatchQuery{ + MarginHistory: api, + } + return query.Query(ctx, asset, startTime, endTime) + }, + Time: func(obj interface{}) time.Time { + return obj.(types.MarginLoan).Time.Time() + }, + ID: func(obj interface{}) string { + return strconv.FormatUint(obj.(types.MarginLoan).TransactionID, 10) + }, + LogInsert: true, + }, + { + Select: SelectLastMarginRepays(ex.Name(), 100), + Type: types.MarginRepay{}, + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.MarginRepayBatchQuery{ + MarginHistory: api, + } + return query.Query(ctx, asset, startTime, endTime) + }, + Time: func(obj interface{}) time.Time { + return obj.(types.MarginRepay).Time.Time() + }, + ID: func(obj interface{}) string { + return strconv.FormatUint(obj.(types.MarginRepay).TransactionID, 10) + }, + LogInsert: true, + }, + { + Select: 
SelectLastMarginInterests(ex.Name(), 100), + Type: types.MarginInterest{}, + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.MarginInterestBatchQuery{ + MarginHistory: api, + } + return query.Query(ctx, asset, startTime, endTime) + }, + Time: func(obj interface{}) time.Time { + return obj.(types.MarginInterest).Time.Time() + }, + ID: func(obj interface{}) string { + m := obj.(types.MarginInterest) + return m.Asset + m.IsolatedSymbol + strconv.FormatInt(m.Time.UnixMilli(), 10) + }, + LogInsert: true, + }, + { + Select: SelectLastMarginLiquidations(ex.Name(), 100), + Type: types.MarginLiquidation{}, + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.MarginLiquidationBatchQuery{ + MarginHistory: api, + } + return query.Query(ctx, startTime, endTime) + }, + Time: func(obj interface{}) time.Time { + return obj.(types.MarginLiquidation).UpdatedTime.Time() + }, + ID: func(obj interface{}) string { + m := obj.(types.MarginLiquidation) + return strconv.FormatUint(m.OrderID, 10) + }, + LogInsert: true, + }, + } + + for _, sel := range tasks { + if err := sel.execute(ctx, s.DB, startTime); err != nil { + return err + } + } + + return nil +} + +func SelectLastMarginLoans(ex types.ExchangeName, limit uint64) sq.SelectBuilder { + return sq.Select("*"). + From("margin_loans"). + Where(sq.Eq{"exchange": ex}). + OrderBy("time DESC"). + Limit(limit) +} + +func SelectLastMarginRepays(ex types.ExchangeName, limit uint64) sq.SelectBuilder { + return sq.Select("*"). + From("margin_repays"). + Where(sq.Eq{"exchange": ex}). + OrderBy("time DESC"). + Limit(limit) +} + +func SelectLastMarginInterests(ex types.ExchangeName, limit uint64) sq.SelectBuilder { + return sq.Select("*"). + From("margin_interests"). + Where(sq.Eq{"exchange": ex}). + OrderBy("time DESC"). + Limit(limit) +} + +func SelectLastMarginLiquidations(ex types.ExchangeName, limit uint64) sq.SelectBuilder { + return sq.Select("*"). + From("margin_liquidations"). + Where(sq.Eq{"exchange": ex}). + OrderBy("time DESC"). 
+ Limit(limit) +} diff --git a/pkg/service/margin_test.go b/pkg/service/margin_test.go new file mode 100644 index 0000000000..5fa85265d2 --- /dev/null +++ b/pkg/service/margin_test.go @@ -0,0 +1,52 @@ +package service + +import ( + "context" + "testing" + "time" + + "github.com/jmoiron/sqlx" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/exchange/binance" + "github.com/c9s/bbgo/pkg/testutil" +) + +func TestMarginService(t *testing.T) { + key, secret, ok := testutil.IntegrationTestConfigured(t, "BINANCE") + if !ok { + t.SkipNow() + return + } + + ex := binance.New(key, secret) + ex.MarginSettings.IsMargin = true + ex.MarginSettings.IsIsolatedMargin = true + ex.MarginSettings.IsolatedMarginSymbol = "DOTUSDT" + + logrus.SetLevel(logrus.ErrorLevel) + db, err := prepareDB(t) + + assert.NoError(t, err) + + if err != nil { + t.Fail() + return + } + + defer db.Close() + + ctx := context.Background() + + dbx := sqlx.NewDb(db.DB, "sqlite3") + service := &MarginService{DB: dbx} + + logrus.SetLevel(logrus.DebugLevel) + err = service.Sync(ctx, ex, "USDT", time.Date(2022, time.February, 1, 0, 0, 0, 0, time.UTC)) + assert.NoError(t, err) + + // sync second time to ensure that we can query records + err = service.Sync(ctx, ex, "USDT", time.Date(2022, time.February, 1, 0, 0, 0, 0, time.UTC)) + assert.NoError(t, err) +} diff --git a/pkg/service/memory.go b/pkg/service/memory.go new file mode 100644 index 0000000000..92ee9f6cdc --- /dev/null +++ b/pkg/service/memory.go @@ -0,0 +1,51 @@ +package service + +import ( + "reflect" + "strings" +) + +type MemoryService struct { + Slots map[string]interface{} +} + +func NewMemoryService() *MemoryService { + return &MemoryService{ + Slots: make(map[string]interface{}), + } +} + +func (s *MemoryService) NewStore(id string, subIDs ...string) Store { + key := strings.Join(append([]string{id}, subIDs...), ":") + return &MemoryStore{ + Key: key, + memory: s, + } +} + +type MemoryStore struct { + Key string + memory *MemoryService +} + +func (store *MemoryStore) Save(val interface{}) error { + store.memory.Slots[store.Key] = val + return nil +} + +func (store *MemoryStore) Load(val interface{}) error { + v := reflect.ValueOf(val) + if data, ok := store.memory.Slots[store.Key]; ok { + dataRV := reflect.ValueOf(data) + v.Elem().Set(dataRV) + } else { + return ErrPersistenceNotExists + } + + return nil +} + +func (store *MemoryStore) Reset() error { + delete(store.memory.Slots, store.Key) + return nil +} diff --git a/pkg/service/memory_test.go b/pkg/service/memory_test.go new file mode 100644 index 0000000000..e6106d78b0 --- /dev/null +++ b/pkg/service/memory_test.go @@ -0,0 +1,33 @@ +package service + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestMemoryService(t *testing.T) { + t.Run("load_empty", func(t *testing.T) { + service := NewMemoryService() + store := service.NewStore("test") + + j := 0 + err := store.Load(&j) + assert.Error(t, err) + }) + + t.Run("save_and_load", func(t *testing.T) { + service := NewMemoryService() + store := service.NewStore("test") + + i := 3 + err := store.Save(i) + + assert.NoError(t, err) + + var j = 0 + err = store.Load(&j) + assert.NoError(t, err) + assert.Equal(t, i, j) + }) +} diff --git a/pkg/service/order.go b/pkg/service/order.go index f8080fad4e..b8cb8295fb 100644 --- a/pkg/service/order.go +++ b/pkg/service/order.go @@ -1,10 +1,16 @@ package service import ( + "context" + "strconv" + "strings" + "time" + + sq "github.com/Masterminds/squirrel" 
"github.com/jmoiron/sqlx" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/c9s/bbgo/pkg/exchange/batch" "github.com/c9s/bbgo/pkg/types" ) @@ -12,40 +18,105 @@ type OrderService struct { DB *sqlx.DB } -// QueryLast queries the last order from the database -func (s *OrderService) QueryLast(ex types.ExchangeName, symbol string, isMargin bool, isIsolated bool) (*types.Order, error) { - log.Infof("querying last order exchange = %s AND symbol = %s AND is_margin = %v AND is_isolated = %v", ex, symbol, isMargin, isIsolated) +func (s *OrderService) Sync(ctx context.Context, exchange types.Exchange, symbol string, startTime time.Time) error { + isMargin, isFutures, isIsolated, isolatedSymbol := getExchangeAttributes(exchange) + // override symbol if isolatedSymbol is not empty + if isIsolated && len(isolatedSymbol) > 0 { + symbol = isolatedSymbol + } - rows, err := s.DB.NamedQuery(`SELECT * FROM orders WHERE exchange = :exchange AND symbol = :symbol AND is_margin = :is_margin AND is_isolated = :is_isolated ORDER BY gid DESC LIMIT 1`, map[string]interface{}{ - "exchange": ex, - "symbol": symbol, - "is_margin": isMargin, - "is_isolated": isIsolated, - }) + api, ok := exchange.(types.ExchangeTradeHistoryService) + if !ok { + return nil + } - if err != nil { - return nil, errors.Wrap(err, "query last order error") + lastOrderID := uint64(0) + tasks := []SyncTask{ + { + Type: types.Order{}, + Time: func(obj interface{}) time.Time { + return obj.(types.Order).CreationTime.Time() + }, + ID: func(obj interface{}) string { + order := obj.(types.Order) + return strconv.FormatUint(order.OrderID, 10) + }, + Select: SelectLastOrders(exchange.Name(), symbol, isMargin, isFutures, isIsolated, 100), + OnLoad: func(objs interface{}) { + // update last order ID + orders := objs.([]types.Order) + if len(orders) > 0 { + end := len(orders) - 1 + last := orders[end] + lastOrderID = last.OrderID + } + }, + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.ClosedOrderBatchQuery{ + ExchangeTradeHistoryService: api, + } + + return query.Query(ctx, symbol, startTime, endTime, lastOrderID) + }, + Filter: func(obj interface{}) bool { + // skip canceled and not filled orders + order := obj.(types.Order) + if order.Status == types.OrderStatusCanceled && order.ExecutedQuantity.IsZero() { + return false + } + + return true + }, + Insert: func(obj interface{}) error { + order := obj.(types.Order) + return s.Insert(order) + }, + LogInsert: true, + }, } - if rows.Err() != nil { - return nil, rows.Err() + for _, sel := range tasks { + if err := sel.execute(ctx, s.DB, startTime); err != nil { + return err + } } - defer rows.Close() + return nil +} - if rows.Next() { - var order types.Order - err = rows.StructScan(&order) - return &order, err - } +func SelectLastOrders(ex types.ExchangeName, symbol string, isMargin, isFutures, isIsolated bool, limit uint64) sq.SelectBuilder { + return sq.Select("*"). + From("orders"). + Where(sq.And{ + sq.Eq{"symbol": symbol}, + sq.Eq{"exchange": ex}, + sq.Eq{"is_margin": isMargin}, + sq.Eq{"is_futures": isFutures}, + sq.Eq{"is_isolated": isIsolated}, + }). + OrderBy("gid DESC"). 
+ Limit(limit) +} - return nil, rows.Err() +type AggOrder struct { + types.Order + AveragePrice *float64 `json:"averagePrice" db:"average_price"` } -func (s *OrderService) Query(ex types.ExchangeName, symbol string) ([]types.Order, error) { - rows, err := s.DB.NamedQuery(`SELECT * FROM orders WHERE exchange = :exchange AND symbol = :symbol ORDER BY gid ASC`, map[string]interface{}{ - "exchange": ex, - "symbol": symbol, +type QueryOrdersOptions struct { + Exchange types.ExchangeName + Symbol string + LastGID int64 + Ordering string +} + +func (s *OrderService) Query(options QueryOrdersOptions) ([]AggOrder, error) { + sql := genOrderSQL(options) + + rows, err := s.DB.NamedQuery(sql, map[string]interface{}{ + "exchange": options.Exchange, + "symbol": options.Symbol, + "gid": options.LastGID, }) if err != nil { return nil, err @@ -53,7 +124,59 @@ func (s *OrderService) Query(ex types.ExchangeName, symbol string) ([]types.Orde defer rows.Close() - return s.scanRows(rows) + return s.scanAggRows(rows) +} + +func genOrderSQL(options QueryOrdersOptions) string { + // ascending + ordering := "ASC" + switch v := strings.ToUpper(options.Ordering); v { + case "DESC", "ASC": + ordering = options.Ordering + } + + var where []string + if options.LastGID > 0 { + switch ordering { + case "ASC": + where = append(where, "gid > :gid") + case "DESC": + where = append(where, "gid < :gid") + + } + } + + if len(options.Exchange) > 0 { + where = append(where, "exchange = :exchange") + } + if len(options.Symbol) > 0 { + where = append(where, "symbol = :symbol") + } + + sql := `SELECT orders.*, IFNULL(SUM(t.price * t.quantity)/SUM(t.quantity), orders.price) AS average_price FROM orders` + + ` LEFT JOIN trades AS t ON (t.order_id = orders.order_id)` + if len(where) > 0 { + sql += ` WHERE ` + strings.Join(where, " AND ") + } + sql += ` GROUP BY orders.gid ` + sql += ` ORDER BY orders.gid ` + ordering + sql += ` LIMIT ` + strconv.Itoa(500) + + log.Info(sql) + return sql +} + +func (s *OrderService) scanAggRows(rows *sqlx.Rows) (orders []AggOrder, err error) { + for rows.Next() { + var order AggOrder + if err := rows.StructScan(&order); err != nil { + return nil, err + } + + orders = append(orders, order) + } + + return orders, rows.Err() } func (s *OrderService) scanRows(rows *sqlx.Rows) (orders []types.Order, err error) { @@ -69,10 +192,19 @@ func (s *OrderService) scanRows(rows *sqlx.Rows) (orders []types.Order, err erro return orders, rows.Err() } -func (s *OrderService) Insert(order types.Order) error { - _, err := s.DB.NamedExec(` - INSERT INTO orders (exchange, order_id, client_order_id, order_type, status, symbol, price, stop_price, quantity, executed_quantity, side, is_working, time_in_force, created_at, updated_at, is_margin, is_isolated) - VALUES (:exchange, :order_id, :client_order_id, :order_type, :status, :symbol, :price, :stop_price, :quantity, :executed_quantity, :side, :is_working, :time_in_force, :created_at, :updated_at, :is_margin, :is_isolated) +func (s *OrderService) Insert(order types.Order) (err error) { + if s.DB.DriverName() == "mysql" { + _, err = s.DB.NamedExec(` + INSERT INTO orders (exchange, order_id, client_order_id, order_type, status, symbol, price, stop_price, quantity, executed_quantity, side, is_working, time_in_force, created_at, updated_at, is_margin, is_futures, is_isolated) + VALUES (:exchange, :order_id, :client_order_id, :order_type, :status, :symbol, :price, :stop_price, :quantity, :executed_quantity, :side, :is_working, :time_in_force, :created_at, :updated_at, :is_margin, 
:is_futures, :is_isolated) ON DUPLICATE KEY UPDATE status=:status, executed_quantity=:executed_quantity, is_working=:is_working, updated_at=:updated_at`, order) + return err + } + + _, err = s.DB.NamedExec(` + INSERT INTO orders (exchange, order_id, client_order_id, order_type, status, symbol, price, stop_price, quantity, executed_quantity, side, is_working, time_in_force, created_at, updated_at, is_margin, is_futures, is_isolated) + VALUES (:exchange, :order_id, :client_order_id, :order_type, :status, :symbol, :price, :stop_price, :quantity, :executed_quantity, :side, :is_working, :time_in_force, :created_at, :updated_at, :is_margin, :is_futures, :is_isolated) + `, order) + return err } diff --git a/pkg/service/order_test.go b/pkg/service/order_test.go new file mode 100644 index 0000000000..d7efd534e7 --- /dev/null +++ b/pkg/service/order_test.go @@ -0,0 +1,24 @@ +package service + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_genOrderSQL(t *testing.T) { + t.Run("accept empty options", func(t *testing.T) { + o := QueryOrdersOptions{} + assert.Equal(t, "SELECT orders.*, IFNULL(SUM(t.price * t.quantity)/SUM(t.quantity), orders.price) AS average_price FROM orders LEFT JOIN trades AS t ON (t.order_id = orders.order_id) GROUP BY orders.gid ORDER BY orders.gid ASC LIMIT 500", genOrderSQL(o)) + }) + + t.Run("different ordering ", func(t *testing.T) { + o := QueryOrdersOptions{} + assert.Equal(t, "SELECT orders.*, IFNULL(SUM(t.price * t.quantity)/SUM(t.quantity), orders.price) AS average_price FROM orders LEFT JOIN trades AS t ON (t.order_id = orders.order_id) GROUP BY orders.gid ORDER BY orders.gid ASC LIMIT 500", genOrderSQL(o)) + o.Ordering = "ASC" + assert.Equal(t, "SELECT orders.*, IFNULL(SUM(t.price * t.quantity)/SUM(t.quantity), orders.price) AS average_price FROM orders LEFT JOIN trades AS t ON (t.order_id = orders.order_id) GROUP BY orders.gid ORDER BY orders.gid ASC LIMIT 500", genOrderSQL(o)) + o.Ordering = "DESC" + assert.Equal(t, "SELECT orders.*, IFNULL(SUM(t.price * t.quantity)/SUM(t.quantity), orders.price) AS average_price FROM orders LEFT JOIN trades AS t ON (t.order_id = orders.order_id) GROUP BY orders.gid ORDER BY orders.gid DESC LIMIT 500", genOrderSQL(o)) + }) + +} diff --git a/pkg/service/persistence.go b/pkg/service/persistence.go new file mode 100644 index 0000000000..e8cb47c359 --- /dev/null +++ b/pkg/service/persistence.go @@ -0,0 +1,22 @@ +package service + +type PersistenceService interface { + NewStore(id string, subIDs ...string) Store +} + +type Store interface { + Load(val interface{}) error + Save(val interface{}) error + Reset() error +} + +type RedisPersistenceConfig struct { + Host string `yaml:"host" json:"host" env:"REDIS_HOST"` + Port string `yaml:"port" json:"port" env:"REDIS_PORT"` + Password string `yaml:"password,omitempty" json:"password,omitempty" env:"REDIS_PASSWORD"` + DB int `yaml:"db" json:"db" env:"REDIS_DB"` +} + +type JsonPersistenceConfig struct { + Directory string `yaml:"directory" json:"directory"` +} diff --git a/pkg/service/persistence_facade.go b/pkg/service/persistence_facade.go new file mode 100644 index 0000000000..2adce0eb20 --- /dev/null +++ b/pkg/service/persistence_facade.go @@ -0,0 +1,21 @@ +package service + +type PersistenceServiceFacade struct { + Redis *RedisPersistenceService + Json *JsonPersistenceService + Memory *MemoryService +} + +// Get returns the preferred persistence service by fallbacks +// Redis will be preferred at the first position. 
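+// The fallback order is Redis, then the JSON file store, and finally the in-memory store.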
+func (facade *PersistenceServiceFacade) Get() PersistenceService { + if facade.Redis != nil { + return facade.Redis + } + + if facade.Json != nil { + return facade.Json + } + + return facade.Memory +} diff --git a/pkg/service/persistence_json.go b/pkg/service/persistence_json.go new file mode 100644 index 0000000000..3bea745567 --- /dev/null +++ b/pkg/service/persistence_json.go @@ -0,0 +1,78 @@ +package service + +import ( + "encoding/json" + "io/ioutil" + "os" + "path/filepath" +) + +type JsonPersistenceService struct { + Directory string +} + +func (s *JsonPersistenceService) NewStore(id string, subIDs ...string) Store { + return &JsonStore{ + ID: id, + Directory: filepath.Join(append([]string{s.Directory}, subIDs...)...), + } +} + +type JsonStore struct { + ID string + Directory string +} + +func (store JsonStore) Reset() error { + if _, err := os.Stat(store.Directory); os.IsNotExist(err) { + return nil + } + + p := filepath.Join(store.Directory, store.ID) + ".json" + if _, err := os.Stat(p); os.IsNotExist(err) { + return nil + } + + return os.Remove(p) +} + +func (store JsonStore) Load(val interface{}) error { + if _, err := os.Stat(store.Directory); os.IsNotExist(err) { + if err2 := os.MkdirAll(store.Directory, 0777); err2 != nil { + return err2 + } + } + + p := filepath.Join(store.Directory, store.ID) + ".json" + + if _, err := os.Stat(p); os.IsNotExist(err) { + return ErrPersistenceNotExists + } + + data, err := ioutil.ReadFile(p) + if err != nil { + return err + } + + if len(data) == 0 { + return ErrPersistenceNotExists + } + + return json.Unmarshal(data, val) +} + +func (store JsonStore) Save(val interface{}) error { + if _, err := os.Stat(store.Directory); os.IsNotExist(err) { + if err2 := os.MkdirAll(store.Directory, 0777); err2 != nil { + return err2 + } + } + + data, err := json.Marshal(val) + if err != nil { + return err + } + + p := filepath.Join(store.Directory, store.ID) + ".json" + return ioutil.WriteFile(p, data, 0666) +} diff --git a/pkg/service/persistence_redis.go b/pkg/service/persistence_redis.go new file mode 100644 index 0000000000..6b91d05832 --- /dev/null +++ b/pkg/service/persistence_redis.go @@ -0,0 +1,96 @@ +package service + +import ( + "context" + "encoding/json" + "errors" + "net" + "strings" + + "github.com/go-redis/redis/v8" + log "github.com/sirupsen/logrus" +) + +type RedisPersistenceService struct { + redis *redis.Client +} + +func NewRedisPersistenceService(config *RedisPersistenceConfig) *RedisPersistenceService { + client := redis.NewClient(&redis.Options{ + Addr: net.JoinHostPort(config.Host, config.Port), + // Username: "", // username is only for redis 6.0 + // pragma: allowlist nextline secret + Password: config.Password, // no password set + DB: config.DB, // use default DB + }) + + return &RedisPersistenceService{ + redis: client, + } +} + +func (s *RedisPersistenceService) NewStore(id string, subIDs ...string) Store { + if len(subIDs) > 0 { + id += ":" + strings.Join(subIDs, ":") + } + + return &RedisStore{ + redis: s.redis, + ID: id, + } +} + +type RedisStore struct { + redis *redis.Client + + ID string +} + +func (store *RedisStore) Load(val interface{}) error { + if store.redis == nil { + return errors.New("can not load from redis, possible cause: redis persistence is not configured, or you are trying to use redis in back-test") + } + + cmd := store.redis.Get(context.Background(), store.ID) + data, err := cmd.Result() + + log.Debugf("[redis] get key %q, data = %s", store.ID, string(data)) + + if err != nil { + if err == redis.Nil { + 
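+			// redis.Nil means the key does not exist; translate it into ErrPersistenceNotExists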
return ErrPersistenceNotExists + } + + return err + } + + // skip null data + if len(data) == 0 || data == "null" { + return ErrPersistenceNotExists + } + + return json.Unmarshal([]byte(data), val) +} + +func (store *RedisStore) Save(val interface{}) error { + if val == nil { + return nil + } + + data, err := json.Marshal(val) + if err != nil { + return err + } + + cmd := store.redis.Set(context.Background(), store.ID, data, 0) + _, err = cmd.Result() + + log.Debugf("[redis] set key %q, data = %s", store.ID, string(data)) + + return err +} + +func (store *RedisStore) Reset() error { + _, err := store.redis.Del(context.Background(), store.ID).Result() + return err +} diff --git a/pkg/bbgo/redis_persistence_test.go b/pkg/service/persistence_redis_test.go similarity index 63% rename from pkg/bbgo/redis_persistence_test.go rename to pkg/service/persistence_redis_test.go index 4c8b0679b1..ff1d0f0d6d 100644 --- a/pkg/bbgo/redis_persistence_test.go +++ b/pkg/service/persistence_redis_test.go @@ -1,4 +1,4 @@ -package bbgo +package service import ( "testing" @@ -39,29 +39,3 @@ func TestRedisPersistentService(t *testing.T) { err = store.Reset() assert.NoError(t, err) } - -func TestMemoryService(t *testing.T) { - t.Run("load_empty", func(t *testing.T) { - service := NewMemoryService() - store := service.NewStore("test") - - j := 0 - err := store.Load(&j) - assert.Error(t, err) - }) - - t.Run("save_and_load", func(t *testing.T) { - service := NewMemoryService() - store := service.NewStore("test") - - i := 3 - err := store.Save(&i) - - assert.NoError(t, err) - - var j = 0 - err = store.Load(&j) - assert.NoError(t, err) - assert.Equal(t, i, j) - }) -} diff --git a/pkg/service/position.go b/pkg/service/position.go new file mode 100644 index 0000000000..5fb38eaa75 --- /dev/null +++ b/pkg/service/position.go @@ -0,0 +1,101 @@ +package service + +import ( + "context" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type PositionService struct { + DB *sqlx.DB +} + +func NewPositionService(db *sqlx.DB) *PositionService { + return &PositionService{db} +} + +func (s *PositionService) Load(ctx context.Context, id int64) (*types.Position, error) { + var pos types.Position + + rows, err := s.DB.NamedQuery("SELECT * FROM positions WHERE id = :id", map[string]interface{}{ + "id": id, + }) + if err != nil { + return nil, err + } + + defer rows.Close() + + if rows.Next() { + err = rows.StructScan(&pos) + return &pos, err + } + + return nil, errors.Wrapf(ErrTradeNotFound, "position id:%d not found", id) +} + +func (s *PositionService) scanRows(rows *sqlx.Rows) (positions []types.Position, err error) { + for rows.Next() { + var p types.Position + if err := rows.StructScan(&p); err != nil { + return positions, err + } + + positions = append(positions, p) + } + + return positions, rows.Err() +} + +func (s *PositionService) Insert(position *types.Position, trade types.Trade, profit fixedpoint.Value) error { + _, err := s.DB.NamedExec(` + INSERT INTO positions ( + strategy, + strategy_instance_id, + symbol, + quote_currency, + base_currency, + average_cost, + base, + quote, + profit, + trade_id, + exchange, + side, + traded_at + ) VALUES ( + :strategy, + :strategy_instance_id, + :symbol, + :quote_currency, + :base_currency, + :average_cost, + :base, + :quote, + :profit, + :trade_id, + :exchange, + :side, + :traded_at + )`, + map[string]interface{}{ + "strategy": position.Strategy, + "strategy_instance_id": position.StrategyInstanceID, + 
"symbol": position.Symbol, + "quote_currency": position.QuoteCurrency, + "base_currency": position.BaseCurrency, + "average_cost": position.AverageCost, + "base": position.Base, + "quote": position.Quote, + "profit": profit, + "trade_id": trade.ID, + "exchange": trade.Exchange, + "side": trade.Side, + "traded_at": trade.Time, + }) + return err +} diff --git a/pkg/service/position_test.go b/pkg/service/position_test.go new file mode 100644 index 0000000000..b45a95fcf7 --- /dev/null +++ b/pkg/service/position_test.go @@ -0,0 +1,61 @@ +package service + +import ( + "testing" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func TestPositionService(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer func() { + err := db.Close() + assert.NoError(t, err) + }() + + xdb := sqlx.NewDb(db.DB, "sqlite3") + service := &PositionService{DB: xdb} + + t.Run("minimal fields", func(t *testing.T) { + err = service.Insert(&types.Position{ + Symbol: "BTCUSDT", + BaseCurrency: "BTC", + QuoteCurrency: "USDT", + AverageCost: fixedpoint.NewFromFloat(44000), + ChangedAt: time.Now(), + }, types.Trade{ + Time: types.Time(time.Now()), + }, fixedpoint.Zero) + assert.NoError(t, err) + }) + + t.Run("full fields", func(t *testing.T) { + err = service.Insert(&types.Position{ + Symbol: "BTCUSDT", + BaseCurrency: "BTC", + QuoteCurrency: "USDT", + AverageCost: fixedpoint.NewFromFloat(44000), + Base: fixedpoint.NewFromFloat(0.1), + Quote: fixedpoint.NewFromFloat(-44000.0), + ChangedAt: time.Now(), + Strategy: "bollmaker", + StrategyInstanceID: "bollmaker-BTCUSDT-1m", + }, types.Trade{ + ID: 9, + Exchange: types.ExchangeBinance, + Side: types.SideTypeSell, + Time: types.Time(time.Now()), + }, fixedpoint.NewFromFloat(10.9)) + assert.NoError(t, err) + }) + +} diff --git a/pkg/service/profit.go b/pkg/service/profit.go new file mode 100644 index 0000000000..9396e69535 --- /dev/null +++ b/pkg/service/profit.go @@ -0,0 +1,106 @@ +package service + +import ( + "context" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/types" +) + +type ProfitService struct { + DB *sqlx.DB +} + +func (s *ProfitService) Load(ctx context.Context, id int64) (*types.Trade, error) { + var trade types.Trade + + rows, err := s.DB.NamedQuery("SELECT * FROM trades WHERE id = :id", map[string]interface{}{ + "id": id, + }) + if err != nil { + return nil, err + } + + defer rows.Close() + + if rows.Next() { + err = rows.StructScan(&trade) + return &trade, err + } + + return nil, errors.Wrapf(ErrTradeNotFound, "trade id:%d not found", id) +} + +func (s *ProfitService) scanRows(rows *sqlx.Rows) (profits []types.Profit, err error) { + for rows.Next() { + var profit types.Profit + if err := rows.StructScan(&profit); err != nil { + return profits, err + } + + profits = append(profits, profit) + } + + return profits, rows.Err() +} + +func (s *ProfitService) Insert(profit types.Profit) error { + _, err := s.DB.NamedExec(` + INSERT INTO profits ( + strategy, + strategy_instance_id, + symbol, + quote_currency, + base_currency, + average_cost, + profit, + net_profit, + profit_margin, + net_profit_margin, + trade_id, + price, + quantity, + quote_quantity, + side, + is_buyer, + is_maker, + fee, + fee_currency, + fee_in_usd, + traded_at, + exchange, + is_margin, + is_futures, + is_isolated + ) VALUES ( + :strategy, + :strategy_instance_id, + :symbol, + :quote_currency, + :base_currency, + 
:average_cost, + :profit, + :net_profit, + :profit_margin, + :net_profit_margin, + :trade_id, + :price, + :quantity, + :quote_quantity, + :side, + :is_buyer, + :is_maker, + :fee, + :fee_currency, + :fee_in_usd, + :traded_at, + :exchange, + :is_margin, + :is_futures, + :is_isolated + )`, + profit) + return err +} diff --git a/pkg/service/profit_test.go b/pkg/service/profit_test.go new file mode 100644 index 0000000000..e4616a80ea --- /dev/null +++ b/pkg/service/profit_test.go @@ -0,0 +1,41 @@ +package service + +import ( + "testing" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func TestProfitService(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + xdb := sqlx.NewDb(db.DB, "sqlite3") + service := &ProfitService{DB: xdb} + + err = service.Insert(types.Profit{ + Symbol: "BTCUSDT", + BaseCurrency: "BTC", + QuoteCurrency: "USDT", + AverageCost: fixedpoint.NewFromFloat(44000), + Profit: fixedpoint.NewFromFloat(1.01), + NetProfit: fixedpoint.NewFromFloat(0.98), + TradeID: 99, + Side: types.SideTypeSell, + Price: fixedpoint.NewFromFloat(44300), + Quantity: fixedpoint.NewFromFloat(0.001), + QuoteQuantity: fixedpoint.NewFromFloat(44.0), + Exchange: types.ExchangeMax, + TradedAt: time.Now(), + }) + assert.NoError(t, err) +} diff --git a/pkg/service/reflect.go b/pkg/service/reflect.go new file mode 100644 index 0000000000..b60c0c3371 --- /dev/null +++ b/pkg/service/reflect.go @@ -0,0 +1,231 @@ +package service + +import ( + "context" + "reflect" + "strings" + + "github.com/Masterminds/squirrel" + "github.com/fatih/camelcase" + gopluralize "github.com/gertd/go-pluralize" + "github.com/jmoiron/sqlx" + "github.com/sirupsen/logrus" +) + +var pluralize = gopluralize.NewClient() + +func tableNameOf(record interface{}) string { + rt := reflect.TypeOf(record) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + + typeName := rt.Name() + tableName := strings.Join(camelcase.Split(typeName), "_") + tableName = strings.ToLower(tableName) + return pluralize.Plural(tableName) +} + +func placeholdersOf(record interface{}) []string { + rt := reflect.TypeOf(record) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + + if rt.Kind() != reflect.Struct { + return nil + } + + var dbFields []string + for i := 0; i < rt.NumField(); i++ { + fieldType := rt.Field(i) + if tag, ok := fieldType.Tag.Lookup("db"); ok { + if tag == "gid" { + continue + } + + dbFields = append(dbFields, ":"+tag) + } + } + + return dbFields +} + +func fieldsNamesOf(record interface{}) []string { + rt := reflect.TypeOf(record) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + + if rt.Kind() != reflect.Struct { + return nil + } + + var dbFields []string + for i := 0; i < rt.NumField(); i++ { + fieldType := rt.Field(i) + if tag, ok := fieldType.Tag.Lookup("db"); ok { + if tag == "gid" { + continue + } + + dbFields = append(dbFields, tag) + } + } + + return dbFields +} + +func ParseStructTag(s string) (string, map[string]string) { + opts := make(map[string]string) + ss := strings.Split(s, ",") + if len(ss) > 1 { + for _, opt := range ss[1:] { + aa := strings.SplitN(opt, "=", 2) + if len(aa) == 2 { + opts[aa[0]] = aa[1] + } else { + opts[aa[0]] = "" + } + } + } + + return ss[0], opts +} + +type ReflectCache struct { + tableNames map[string]string + fields map[string][]string + placeholders map[string][]string + insertSqls map[string]string +} + +func NewReflectCache() *ReflectCache 
{ + return &ReflectCache{ + tableNames: make(map[string]string), + fields: make(map[string][]string), + placeholders: make(map[string][]string), + insertSqls: make(map[string]string), + } +} + +func (c *ReflectCache) InsertSqlOf(t interface{}) string { + rt := reflect.TypeOf(t) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + + typeName := rt.Name() + sql, ok := c.insertSqls[typeName] + if ok { + return sql + } + + tableName := dbCache.TableNameOf(t) + fields := dbCache.FieldsOf(t) + placeholders := dbCache.PlaceholderOf(t) + fieldClause := strings.Join(fields, ", ") + placeholderClause := strings.Join(placeholders, ", ") + + sql = `INSERT INTO ` + tableName + ` (` + fieldClause + `) VALUES (` + placeholderClause + `)` + c.insertSqls[typeName] = sql + return sql +} + +func (c *ReflectCache) TableNameOf(t interface{}) string { + rt := reflect.TypeOf(t) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + + typeName := rt.Name() + tableName, ok := c.tableNames[typeName] + if ok { + return tableName + } + + tableName = tableNameOf(t) + c.tableNames[typeName] = tableName + return tableName +} + +func (c *ReflectCache) PlaceholderOf(t interface{}) []string { + rt := reflect.TypeOf(t) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + typeName := rt.Name() + placeholders, ok := c.placeholders[typeName] + if ok { + return placeholders + } + + placeholders = placeholdersOf(t) + c.placeholders[typeName] = placeholders + return placeholders +} + +func (c *ReflectCache) FieldsOf(t interface{}) []string { + rt := reflect.TypeOf(t) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + + typeName := rt.Name() + fields, ok := c.fields[typeName] + if ok { + return fields + } + + fields = fieldsNamesOf(t) + c.fields[typeName] = fields + return fields +} + +// scanRowsOfType use the given type to scan rows +// this is usually slower than the native one since it uses reflect. +func scanRowsOfType(rows *sqlx.Rows, tpe interface{}) (interface{}, error) { + refType := reflect.TypeOf(tpe) + + if refType.Kind() == reflect.Ptr { + refType = refType.Elem() + } + + sliceRef := reflect.MakeSlice(reflect.SliceOf(refType), 0, 100) + // sliceRef := reflect.New(reflect.SliceOf(refType)) + for rows.Next() { + var recordRef = reflect.New(refType) + var record = recordRef.Interface() + if err := rows.StructScan(record); err != nil { + return sliceRef.Interface(), err + } + + sliceRef = reflect.Append(sliceRef, recordRef.Elem()) + } + + return sliceRef.Interface(), rows.Err() +} + +func insertType(db *sqlx.DB, record interface{}) error { + sql := dbCache.InsertSqlOf(record) + _, err := db.NamedExec(sql, record) + return err +} + +func selectAndScanType(ctx context.Context, db *sqlx.DB, sel squirrel.SelectBuilder, tpe interface{}) (interface{}, error) { + sql, args, err := sel.ToSql() + if err != nil { + return nil, err + } + + logrus.Debugf("selectAndScanType: %T <- %s", tpe, sql) + logrus.Debugf("queryArgs: %v", args) + + rows, err := db.QueryxContext(ctx, sql, args...) 
+ if err != nil { + return nil, err + } + + defer rows.Close() + return scanRowsOfType(rows, tpe) +} diff --git a/pkg/service/reflect_test.go b/pkg/service/reflect_test.go new file mode 100644 index 0000000000..9eb525ae94 --- /dev/null +++ b/pkg/service/reflect_test.go @@ -0,0 +1,71 @@ +package service + +import ( + "reflect" + "testing" + + "github.com/c9s/bbgo/pkg/types" +) + +func Test_tableNameOf(t *testing.T) { + type args struct { + record interface{} + } + tests := []struct { + name string + args args + want string + }{ + { + name: "MarginInterest", + args: args{record: &types.MarginInterest{}}, + want: "margin_interests", + }, + { + name: "MarginLoan", + args: args{record: &types.MarginLoan{}}, + want: "margin_loans", + }, + { + name: "MarginRepay", + args: args{record: &types.MarginRepay{}}, + want: "margin_repays", + }, + { + name: "MarginLiquidation", + args: args{record: &types.MarginLiquidation{}}, + want: "margin_liquidations", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tableNameOf(tt.args.record); got != tt.want { + t.Errorf("tableNameOf() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_fieldsNamesOf(t *testing.T) { + type args struct { + record interface{} + } + tests := []struct { + name string + args args + want []string + }{ + { + name: "MarginInterest", + args: args{record: &types.MarginInterest{}}, + want: []string{"exchange", "asset", "principle", "interest", "interest_rate", "isolated_symbol", "time"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := fieldsNamesOf(tt.args.record); !reflect.DeepEqual(got, tt.want) { + t.Errorf("fieldsNamesOf() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/service/reward.go b/pkg/service/reward.go new file mode 100644 index 0000000000..ef28f0ef8c --- /dev/null +++ b/pkg/service/reward.go @@ -0,0 +1,198 @@ +package service + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + sq "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + + "github.com/c9s/bbgo/pkg/exchange/batch" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +// RewardService collects the reward records from the exchange, +// currently it's only available for MAX exchange. 
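+// Sync skips margin and futures sessions and deduplicates incoming records by reward type and UUID before inserting.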
+// TODO: add summary query for calculating the reward amounts +// CREATE VIEW reward_summary_by_years AS SELECT YEAR(created_at) as year, reward_type, currency, SUM(quantity) FROM rewards WHERE reward_type != 'airdrop' GROUP BY YEAR(created_at), reward_type, currency ORDER BY year DESC; +type RewardService struct { + DB *sqlx.DB +} + +func (s *RewardService) Sync(ctx context.Context, exchange types.Exchange, startTime time.Time) error { + api, ok := exchange.(types.ExchangeRewardService) + if !ok { + return ErrExchangeRewardServiceNotImplemented + } + + isMargin, isFutures, _, _ := getExchangeAttributes(exchange) + if isMargin || isFutures { + return nil + } + + tasks := []SyncTask{ + { + Type: types.Reward{}, + Select: SelectLastRewards(exchange.Name(), 100), + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.RewardBatchQuery{ + Service: api, + } + return query.Query(ctx, startTime, endTime) + }, + Time: func(obj interface{}) time.Time { + return obj.(types.Reward).CreatedAt.Time() + }, + ID: func(obj interface{}) string { + reward := obj.(types.Reward) + return string(reward.Type) + "_" + reward.UUID + }, + LogInsert: true, + }, + } + + for _, sel := range tasks { + if err := sel.execute(ctx, s.DB, startTime); err != nil { + return err + } + } + + return nil +} + +type CurrencyPositionMap map[string]fixedpoint.Value + +func (s *RewardService) AggregateUnspentCurrencyPosition(ctx context.Context, ex types.ExchangeName, since time.Time) (CurrencyPositionMap, error) { + m := make(CurrencyPositionMap) + + rewards, err := s.QueryUnspentSince(ctx, ex, since) + if err != nil { + return nil, err + } + + for _, reward := range rewards { + m[reward.Currency] = m[reward.Currency].Add(reward.Quantity) + } + + return m, nil +} + +func (s *RewardService) QueryUnspentSince(ctx context.Context, ex types.ExchangeName, since time.Time, rewardTypes ...types.RewardType) ([]types.Reward, error) { + sql := "SELECT * FROM rewards WHERE created_at >= :since AND exchange = :exchange AND spent IS FALSE " + + if len(rewardTypes) == 0 { + sql += " AND `reward_type` NOT IN ('airdrop') " + } else { + var args []string + for _, n := range rewardTypes { + args = append(args, strconv.Quote(string(n))) + } + sql += " AND `reward_type` IN (" + strings.Join(args, ", ") + ") " + } + + sql += " ORDER BY created_at ASC" + + rows, err := s.DB.NamedQueryContext(ctx, sql, map[string]interface{}{ + "exchange": ex, + "since": since, + }) + + if err != nil { + return nil, err + } + + defer rows.Close() + return s.scanRows(rows) +} + +func (s *RewardService) QueryUnspent(ctx context.Context, ex types.ExchangeName, rewardTypes ...types.RewardType) ([]types.Reward, error) { + sql := "SELECT * FROM rewards WHERE exchange = :exchange AND spent IS FALSE " + if len(rewardTypes) == 0 { + sql += " AND `reward_type` NOT IN ('airdrop') " + } else { + var args []string + for _, n := range rewardTypes { + args = append(args, strconv.Quote(string(n))) + } + sql += " AND `reward_type` IN (" + strings.Join(args, ", ") + ") " + } + + sql += " ORDER BY created_at ASC" + rows, err := s.DB.NamedQueryContext(ctx, sql, map[string]interface{}{ + "exchange": ex, + }) + if err != nil { + return nil, err + } + + defer rows.Close() + return s.scanRows(rows) +} + +func (s *RewardService) MarkCurrencyAsSpent(ctx context.Context, currency string) error { + result, err := s.DB.NamedExecContext(ctx, "UPDATE `rewards` SET `spent` = TRUE WHERE `currency` = :currency AND `spent` IS FALSE", 
map[string]interface{}{ + "currency": currency, + }) + + if err != nil { + return err + } + + _, err = result.RowsAffected() + return err +} + +func (s *RewardService) MarkAsSpent(ctx context.Context, uuid string) error { + result, err := s.DB.NamedExecContext(ctx, "UPDATE `rewards` SET `spent` = TRUE WHERE `uuid` = :uuid", map[string]interface{}{ + "uuid": uuid, + }) + if err != nil { + return err + } + + cnt, err := result.RowsAffected() + if err != nil { + return err + } + + if cnt == 0 { + return fmt.Errorf("reward uuid:%s not found", uuid) + } + + return nil +} + +func (s *RewardService) scanRows(rows *sqlx.Rows) (rewards []types.Reward, err error) { + for rows.Next() { + var reward types.Reward + if err := rows.StructScan(&reward); err != nil { + return rewards, err + } + + rewards = append(rewards, reward) + } + + return rewards, rows.Err() +} + +func (s *RewardService) Insert(reward types.Reward) error { + _, err := s.DB.NamedExec(` + INSERT INTO rewards (exchange, uuid, reward_type, currency, quantity, state, note, created_at) + VALUES (:exchange, :uuid, :reward_type, :currency, :quantity, :state, :note, :created_at)`, + reward) + return err +} + +func SelectLastRewards(ex types.ExchangeName, limit uint64) sq.SelectBuilder { + return sq.Select("*"). + From("rewards"). + Where(sq.And{ + sq.Eq{"exchange": ex}, + }). + OrderBy("created_at DESC"). + Limit(limit) +} diff --git a/pkg/service/reward_test.go b/pkg/service/reward_test.go new file mode 100644 index 0000000000..2485d81998 --- /dev/null +++ b/pkg/service/reward_test.go @@ -0,0 +1,140 @@ +package service + +import ( + "context" + "testing" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func TestRewardService_InsertAndQueryUnspent(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + ctx := context.Background() + + xdb := sqlx.NewDb(db.DB, "sqlite3") + service := &RewardService{DB: xdb} + + err = service.Insert(types.Reward{ + UUID: "test01", + Exchange: "max", + Type: "commission", + Currency: "BTC", + Quantity: fixedpoint.One, + State: "done", + Spent: false, + CreatedAt: types.Time(time.Now()), + }) + assert.NoError(t, err) + + rewards, err := service.QueryUnspent(ctx, types.ExchangeMax) + assert.NoError(t, err) + assert.NotEmpty(t, rewards) + assert.Len(t, rewards, 1) + assert.Equal(t, types.RewardCommission, rewards[0].Type) + + err = service.Insert(types.Reward{ + UUID: "test02", + Exchange: "max", + Type: "airdrop", + Currency: "MAX", + Quantity: fixedpoint.NewFromInt(1000000), + State: "done", + Spent: false, + CreatedAt: types.Time(time.Now()), + }) + assert.NoError(t, err) + + rewards, err = service.QueryUnspent(ctx, types.ExchangeMax) + assert.NoError(t, err) + assert.NotEmpty(t, rewards) + assert.Len(t, rewards, 1, "airdrop should not be included") + assert.Equal(t, types.RewardCommission, rewards[0].Type) + + rewards, err = service.QueryUnspent(ctx, types.ExchangeMax, types.RewardAirdrop) + assert.NoError(t, err) + assert.NotEmpty(t, rewards) + assert.Len(t, rewards, 1, "airdrop should be included") + assert.Equal(t, types.RewardAirdrop, rewards[0].Type) + + rewards, err = service.QueryUnspent(ctx, types.ExchangeMax, types.RewardCommission) + assert.NoError(t, err) + assert.NotEmpty(t, rewards) + assert.Len(t, rewards, 1, "should select 1 reward") + assert.Equal(t, types.RewardCommission, rewards[0].Type) +} + +func 
TestRewardService_AggregateUnspentCurrencyPosition(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + ctx := context.Background() + + xdb := sqlx.NewDb(db.DB, "sqlite3") + service := &RewardService{DB: xdb} + + now := time.Now() + + err = service.Insert(types.Reward{ + UUID: "test01", + Exchange: "max", + Type: "commission", + Currency: "BTC", + Quantity: fixedpoint.One, + State: "done", + Spent: false, + CreatedAt: types.Time(now), + }) + assert.NoError(t, err) + + err = service.Insert(types.Reward{ + UUID: "test02", + Exchange: "max", + Type: "commission", + Currency: "LTC", + Quantity: fixedpoint.NewFromInt(2), + State: "done", + Spent: false, + CreatedAt: types.Time(now), + }) + assert.NoError(t, err) + + err = service.Insert(types.Reward{ + UUID: "test03", + Exchange: "max", + Type: "airdrop", + Currency: "MAX", + Quantity: fixedpoint.NewFromInt(1000000), + State: "done", + Spent: false, + CreatedAt: types.Time(now), + }) + assert.NoError(t, err) + + currencyPositions, err := service.AggregateUnspentCurrencyPosition(ctx, types.ExchangeMax, now.Add(-10*time.Second)) + assert.NoError(t, err) + assert.NotEmpty(t, currencyPositions) + assert.Len(t, currencyPositions, 2) + + v, ok := currencyPositions["LTC"] + assert.True(t, ok) + assert.Equal(t, fixedpoint.NewFromInt(2), v) + + v, ok = currencyPositions["BTC"] + assert.True(t, ok) + assert.Equal(t, fixedpoint.One, v) +} diff --git a/pkg/service/sync.go b/pkg/service/sync.go index c28890d97f..aaf757ddb2 100644 --- a/pkg/service/sync.go +++ b/pkg/service/sync.go @@ -2,118 +2,112 @@ package service import ( "context" + "errors" "time" - "github.com/sirupsen/logrus" + "github.com/c9s/bbgo/pkg/cache" + + log "github.com/sirupsen/logrus" "github.com/c9s/bbgo/pkg/types" ) +var ErrNotImplemented = errors.New("not implemented") +var ErrExchangeRewardServiceNotImplemented = errors.New("exchange does not implement ExchangeRewardService interface") + type SyncService struct { - TradeService *TradeService - OrderService *OrderService + TradeService *TradeService + OrderService *OrderService + RewardService *RewardService + WithdrawService *WithdrawService + DepositService *DepositService + MarginService *MarginService } -func (s *SyncService) SyncOrders(ctx context.Context, exchange types.Exchange, symbol string, startTime time.Time) error { - isMargin := false - isIsolated := false - if marginExchange, ok := exchange.(types.MarginExchange); ok { - marginSettings := marginExchange.GetMarginSettings() - isMargin = marginSettings.IsMargin - isIsolated = marginSettings.IsIsolatedMargin - if marginSettings.IsIsolatedMargin { - symbol = marginSettings.IsolatedMarginSymbol - } - } - - lastOrder, err := s.OrderService.QueryLast(exchange.Name(), symbol, isMargin, isIsolated) +// SyncSessionSymbols syncs the trades from the given exchange session +func (s *SyncService) SyncSessionSymbols(ctx context.Context, exchange types.Exchange, startTime time.Time, symbols ...string) error { + markets, err := cache.LoadExchangeMarketsWithCache(ctx, exchange) if err != nil { return err } - var lastID uint64 = 0 - if lastOrder != nil { - lastID = lastOrder.OrderID - startTime = lastOrder.CreationTime - - logrus.Infof("found last order, start from lastID = %d since %s", lastID, startTime) - } - - batch := &types.ExchangeBatchProcessor{Exchange: exchange} - ordersC, errC := batch.BatchQueryClosedOrders(ctx, symbol, startTime, time.Now(), lastID) - for order := range ordersC { - select { - - case <-ctx.Done(): - return 
ctx.Err() + for _, symbol := range symbols { + if _, ok := markets[symbol]; ok { + log.Infof("syncing %s %s trades...", exchange.Name(), symbol) + if err := s.TradeService.Sync(ctx, exchange, symbol, startTime); err != nil { + return err + } - case err := <-errC: - if err != nil { + log.Infof("syncing %s %s orders...", exchange.Name(), symbol) + if err := s.OrderService.Sync(ctx, exchange, symbol, startTime); err != nil { return err } + } + } + + return nil +} - default: +func (s *SyncService) SyncMarginHistory(ctx context.Context, exchange types.Exchange, startTime time.Time, assets ...string) error { + if _, implemented := exchange.(types.MarginHistory); !implemented { + log.Debugf("exchange %T does not support types.MarginHistory", exchange) + return nil + } + if marginExchange, implemented := exchange.(types.MarginExchange); !implemented { + log.Debugf("exchange %T does not implement types.MarginExchange", exchange) + return nil + } else { + marginSettings := marginExchange.GetMarginSettings() + if !marginSettings.IsMargin { + log.Debugf("exchange %T is not using margin", exchange) + return nil } + } - if err := s.OrderService.Insert(order); err != nil { + log.Infof("syncing %s margin history: %v...", exchange.Name(), assets) + for _, asset := range assets { + if err := s.MarginService.Sync(ctx, exchange, asset, startTime); err != nil { return err } } - return <-errC + return nil } -func (s *SyncService) SyncTrades(ctx context.Context, exchange types.Exchange, symbol string, startTime time.Time) error { - isMargin := false - isIsolated := false - if marginExchange, ok := exchange.(types.MarginExchange); ok { - marginSettings := marginExchange.GetMarginSettings() - isMargin = marginSettings.IsMargin - isIsolated = marginSettings.IsIsolatedMargin - if marginSettings.IsIsolatedMargin { - symbol = marginSettings.IsolatedMarginSymbol - } +func (s *SyncService) SyncRewardHistory(ctx context.Context, exchange types.Exchange, startTime time.Time) error { + if _, implemented := exchange.(types.ExchangeRewardService); !implemented { + return nil } - lastTrade, err := s.TradeService.QueryLast(exchange.Name(), symbol, isMargin, isIsolated) - if err != nil { + log.Infof("syncing %s reward records...", exchange.Name()) + if err := s.RewardService.Sync(ctx, exchange, startTime); err != nil { return err } - var lastID int64 = 0 - if lastTrade != nil { - lastID = lastTrade.ID - startTime = lastTrade.Time + return nil +} - logrus.Infof("found last trade, start from lastID = %d since %s", lastID, startTime) +func (s *SyncService) SyncDepositHistory(ctx context.Context, exchange types.Exchange, startTime time.Time) error { + log.Infof("syncing %s deposit records...", exchange.Name()) + if err := s.DepositService.Sync(ctx, exchange, startTime); err != nil { + if err != ErrNotImplemented { + log.Warnf("%s deposit service is not supported", exchange.Name()) + return err + } } - batch := &types.ExchangeBatchProcessor{Exchange: exchange} - tradeC, errC := batch.BatchQueryTrades(ctx, symbol, &types.TradeQueryOptions{ - StartTime: &startTime, - Limit: 200, - LastTradeID: lastID, - }) - - for trade := range tradeC { - select { - case <-ctx.Done(): - return ctx.Err() - - case err := <-errC: - if err != nil { - return err - } - - default: - } + return nil +} - if err := s.TradeService.Insert(trade); err != nil { +func (s *SyncService) SyncWithdrawHistory(ctx context.Context, exchange types.Exchange, startTime time.Time) error { + log.Infof("syncing %s withdraw records...", exchange.Name()) + if err := 
s.WithdrawService.Sync(ctx, exchange, startTime); err != nil { + if err != ErrNotImplemented { + log.Warnf("%s withdraw service is not supported", exchange.Name()) return err } - } - return <-errC + return nil } diff --git a/pkg/service/sync_task.go b/pkg/service/sync_task.go new file mode 100644 index 0000000000..0727e0a765 --- /dev/null +++ b/pkg/service/sync_task.go @@ -0,0 +1,203 @@ +package service + +import ( + "context" + "reflect" + "sort" + "time" + + "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" +) + +// SyncTask defines the behaviors for syncing remote records +type SyncTask struct { + // Type is the element type of this sync task + // Since it will create a []Type slice from this type, you should not set pointer to this field + Type interface{} + + // ID is a function that returns the unique identity of the object + // This function will be used for detecting duplicated objects. + ID func(obj interface{}) string + + // Time is a function that returns the time of the object + // This function will be used for sorting records + Time func(obj interface{}) time.Time + + // Select is the select query builder for querying existing db records + // The built SQL will be used for querying existing db records. + // And then the ID function will be used for filtering duplicated object. + Select squirrel.SelectBuilder + + // OnLoad is an optional field, which is called when the records are loaded from the database + OnLoad func(objs interface{}) + + // Filter is an optional field, which is used for filtering the remote records + Filter func(obj interface{}) bool + + // BatchQuery is used for querying remote records. + BatchQuery func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) + + // Insert is an option field, which is used for customizing the record insert + Insert func(obj interface{}) error + + // Insert is an option field, which is used for customizing the record batch insert + BatchInsert func(obj interface{}) error + + BatchInsertBuffer int + + // LogInsert logs the insert record in INFO level + LogInsert bool +} + +func (sel SyncTask) execute(ctx context.Context, db *sqlx.DB, startTime time.Time, args ...time.Time) error { + batchBufferRefVal := reflect.MakeSlice(reflect.SliceOf(reflect.TypeOf(sel.Type)), 0, sel.BatchInsertBuffer) + + // query from db + recordSlice, err := selectAndScanType(ctx, db, sel.Select, sel.Type) + if err != nil { + return err + } + + recordSliceRef := reflect.ValueOf(recordSlice) + if recordSliceRef.Kind() == reflect.Ptr { + recordSliceRef = recordSliceRef.Elem() + } + + logrus.Debugf("loaded %d %T records", recordSliceRef.Len(), sel.Type) + + ids := buildIdMap(sel, recordSliceRef) + + if err := sortRecords(sel, recordSliceRef); err != nil { + return err + } + + if sel.OnLoad != nil { + sel.OnLoad(recordSliceRef.Interface()) + } + + // default since time point + startTime = lastRecordTime(sel, recordSliceRef, startTime) + + endTime := time.Now() + if len(args) > 0 { + endTime = args[0] + } + + // asset "" means all assets + dataC, errC := sel.BatchQuery(ctx, startTime, endTime) + dataCRef := reflect.ValueOf(dataC) + + defer func() { + if sel.BatchInsert != nil && batchBufferRefVal.Len() > 0 { + slice := batchBufferRefVal.Interface() + if err := sel.BatchInsert(slice); err != nil { + logrus.WithError(err).Errorf("batch insert error: %+v", slice) + } + } + }() + + for { + select { + case <-ctx.Done(): + logrus.Warnf("context is cancelled, stop syncing") + 
return ctx.Err() + + default: + v, ok := dataCRef.Recv() + if !ok { + err := <-errC + return err + } + + obj := v.Interface() + id := sel.ID(obj) + if _, exists := ids[id]; exists { + continue + } + + tt := sel.Time(obj) + if tt.Before(startTime) || tt.Equal(endTime) || tt.After(endTime) { + continue + } + + if sel.Filter != nil { + if !sel.Filter(obj) { + continue + } + } + + if sel.BatchInsert != nil { + if batchBufferRefVal.Len() > sel.BatchInsertBuffer-1 { + if sel.LogInsert { + logrus.Infof("batch inserting %d %T", batchBufferRefVal.Len(), obj) + } else { + logrus.Debugf("batch inserting %d %T", batchBufferRefVal.Len(), obj) + } + + if err := sel.BatchInsert(batchBufferRefVal.Interface()); err != nil { + return err + } + + batchBufferRefVal = reflect.MakeSlice(reflect.SliceOf(reflect.TypeOf(sel.Type)), 0, sel.BatchInsertBuffer) + } + batchBufferRefVal = reflect.Append(batchBufferRefVal, v) + } else { + if sel.LogInsert { + logrus.Infof("inserting %T: %+v", obj, obj) + } else { + logrus.Debugf("inserting %T: %+v", obj, obj) + } + if sel.Insert != nil { + // for custom insert + if err := sel.Insert(obj); err != nil { + logrus.WithError(err).Errorf("can not insert record: %v", obj) + return err + } + } else { + if err := insertType(db, obj); err != nil { + logrus.WithError(err).Errorf("can not insert record: %v", obj) + return err + } + } + } + } + } +} + +func lastRecordTime(sel SyncTask, recordSlice reflect.Value, defaultTime time.Time) time.Time { + since := defaultTime + length := recordSlice.Len() + if length > 0 { + since = sel.Time(recordSlice.Index(length - 1).Interface()) + } + + return since +} + +func sortRecords(sel SyncTask, recordSlice reflect.Value) error { + if sel.Time == nil { + return errors.New("time field is not set, can not sort records") + } + + // always sort + sort.Slice(recordSlice.Interface(), func(i, j int) bool { + a := sel.Time(recordSlice.Index(i).Interface()) + b := sel.Time(recordSlice.Index(j).Interface()) + return a.Before(b) + }) + return nil +} + +func buildIdMap(sel SyncTask, recordSliceRef reflect.Value) map[string]struct{} { + ids := map[string]struct{}{} + for i := 0; i < recordSliceRef.Len(); i++ { + entryRef := recordSliceRef.Index(i) + id := sel.ID(entryRef.Interface()) + ids[id] = struct{}{} + } + + return ids +} diff --git a/pkg/service/totp.go b/pkg/service/totp.go index 0c7db3e27a..8e58c7d51e 100644 --- a/pkg/service/totp.go +++ b/pkg/service/totp.go @@ -31,9 +31,15 @@ func NewDefaultTotpKey() (*otp.Key, error) { } if len(totpAccountName) == 0 { + + //unix like os user, ok := os.LookupEnv("USER") if !ok { - return nil, fmt.Errorf("can not get USER env var for totp account name") + user, ok = os.LookupEnv("USERNAME") + } + + if !ok { + return nil, fmt.Errorf("can not get USER or USERNAME env var for totp account name") } totpAccountName = user diff --git a/pkg/service/trade.go b/pkg/service/trade.go index ba72bcb55a..4d07f0ac9b 100644 --- a/pkg/service/trade.go +++ b/pkg/service/trade.go @@ -1,13 +1,48 @@ package service import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + sq "github.com/Masterminds/squirrel" "github.com/jmoiron/sqlx" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/c9s/bbgo/pkg/exchange/batch" "github.com/c9s/bbgo/pkg/types" ) +var ErrTradeNotFound = errors.New("trade not found") + +type QueryTradesOptions struct { + Exchange types.ExchangeName + Symbol string + LastGID int64 + + // ASC or DESC + Ordering string + Limit int +} + +type TradingVolume struct { + Year int `db:"year" 
json:"year"` + Month int `db:"month" json:"month,omitempty"` + Day int `db:"day" json:"day,omitempty"` + Time time.Time `json:"time,omitempty"` + Exchange string `db:"exchange" json:"exchange,omitempty"` + Symbol string `db:"symbol" json:"symbol,omitempty"` + QuoteVolume float64 `db:"quote_volume" json:"quoteVolume"` +} + +type TradingVolumeQueryOptions struct { + GroupByPeriod string + SegmentBy string +} + type TradeService struct { DB *sqlx.DB } @@ -16,37 +51,236 @@ func NewTradeService(db *sqlx.DB) *TradeService { return &TradeService{db} } +func (s *TradeService) Sync(ctx context.Context, exchange types.Exchange, symbol string, startTime time.Time) error { + isMargin, isFutures, isIsolated, isolatedSymbol := getExchangeAttributes(exchange) + // override symbol if isolatedSymbol is not empty + if isIsolated && len(isolatedSymbol) > 0 { + symbol = isolatedSymbol + } + + api, ok := exchange.(types.ExchangeTradeHistoryService) + if !ok { + return nil + } + + lastTradeID := uint64(1) + tasks := []SyncTask{ + { + Type: types.Trade{}, + Select: SelectLastTrades(exchange.Name(), symbol, isMargin, isFutures, isIsolated, 100), + OnLoad: func(objs interface{}) { + // update last trade ID + trades := objs.([]types.Trade) + if len(trades) > 0 { + end := len(trades) - 1 + last := trades[end] + lastTradeID = last.ID + } + }, + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.TradeBatchQuery{ + ExchangeTradeHistoryService: api, + } + return query.Query(ctx, symbol, &types.TradeQueryOptions{ + StartTime: &startTime, + EndTime: &endTime, + LastTradeID: lastTradeID, + }) + }, + Time: func(obj interface{}) time.Time { + return obj.(types.Trade).Time.Time() + }, + ID: func(obj interface{}) string { + trade := obj.(types.Trade) + return strconv.FormatUint(trade.ID, 10) + trade.Side.String() + }, + LogInsert: true, + }, + } + + for _, sel := range tasks { + if err := sel.execute(ctx, s.DB, startTime); err != nil { + return err + } + } + + return nil +} + +func (s *TradeService) QueryTradingVolume(startTime time.Time, options TradingVolumeQueryOptions) ([]TradingVolume, error) { + args := map[string]interface{}{ + // "symbol": symbol, + // "exchange": ex, + // "is_margin": isMargin, + // "is_isolated": isIsolated, + "start_time": startTime, + } + + sql := "" + driverName := s.DB.DriverName() + if driverName == "mysql" { + sql = generateMysqlTradingVolumeQuerySQL(options) + } else { + sql = generateSqliteTradingVolumeSQL(options) + } + + log.Info(sql) + + rows, err := s.DB.NamedQuery(sql, args) + if err != nil { + return nil, errors.Wrap(err, "query last trade error") + } + + if rows.Err() != nil { + return nil, rows.Err() + } + + defer rows.Close() + + var records []TradingVolume + for rows.Next() { + var record TradingVolume + err = rows.StructScan(&record) + if err != nil { + return records, err + } + + record.Time = time.Date(record.Year, time.Month(record.Month), record.Day, 0, 0, 0, 0, time.UTC) + records = append(records, record) + } + + return records, rows.Err() +} + +func generateSqliteTradingVolumeSQL(options TradingVolumeQueryOptions) string { + timeRangeColumn := "traded_at" + sel, groupBys, orderBys := generateSqlite3TimeRangeClauses(timeRangeColumn, options.GroupByPeriod) + + switch options.SegmentBy { + case "symbol": + sel = append(sel, "symbol") + groupBys = append([]string{"symbol"}, groupBys...) 
+ orderBys = append(orderBys, "symbol") + case "exchange": + sel = append(sel, "exchange") + groupBys = append([]string{"exchange"}, groupBys...) + orderBys = append(orderBys, "exchange") + } + + sel = append(sel, "SUM(quantity * price) AS quote_volume") + where := []string{timeRangeColumn + " > :start_time"} + sql := `SELECT ` + strings.Join(sel, ", ") + ` FROM trades` + + ` WHERE ` + strings.Join(where, " AND ") + + ` GROUP BY ` + strings.Join(groupBys, ", ") + + ` ORDER BY ` + strings.Join(orderBys, ", ") + + return sql +} + +func generateSqlite3TimeRangeClauses(timeRangeColumn, period string) (selectors []string, groupBys []string, orderBys []string) { + switch period { + case "month": + selectors = append(selectors, "strftime('%Y',"+timeRangeColumn+") AS year", "strftime('%m',"+timeRangeColumn+") AS month") + groupBys = append([]string{"month", "year"}, groupBys...) + orderBys = append(orderBys, "year ASC", "month ASC") + + case "year": + selectors = append(selectors, "strftime('%Y',"+timeRangeColumn+") AS year") + groupBys = append([]string{"year"}, groupBys...) + orderBys = append(orderBys, "year ASC") + + case "day": + fallthrough + + default: + selectors = append(selectors, "strftime('%Y',"+timeRangeColumn+") AS year", "strftime('%m',"+timeRangeColumn+") AS month", "strftime('%d',"+timeRangeColumn+") AS day") + groupBys = append([]string{"day", "month", "year"}, groupBys...) + orderBys = append(orderBys, "year ASC", "month ASC", "day ASC") + } + + return +} + +func generateMysqlTimeRangeClauses(timeRangeColumn, period string) (selectors []string, groupBys []string, orderBys []string) { + switch period { + case "month": + selectors = append(selectors, "YEAR("+timeRangeColumn+") AS year", "MONTH("+timeRangeColumn+") AS month") + groupBys = append([]string{"MONTH(" + timeRangeColumn + ")", "YEAR(" + timeRangeColumn + ")"}, groupBys...) + orderBys = append(orderBys, "year ASC", "month ASC") + + case "year": + selectors = append(selectors, "YEAR("+timeRangeColumn+") AS year") + groupBys = append([]string{"YEAR(" + timeRangeColumn + ")"}, groupBys...) + orderBys = append(orderBys, "year ASC") + + case "day": + fallthrough + + default: + selectors = append(selectors, "YEAR("+timeRangeColumn+") AS year", "MONTH("+timeRangeColumn+") AS month", "DAY("+timeRangeColumn+") AS day") + groupBys = append([]string{"DAY(" + timeRangeColumn + ")", "MONTH(" + timeRangeColumn + ")", "YEAR(" + timeRangeColumn + ")"}, groupBys...) + orderBys = append(orderBys, "year ASC", "month ASC", "day ASC") + } + + return +} + +func generateMysqlTradingVolumeQuerySQL(options TradingVolumeQueryOptions) string { + timeRangeColumn := "traded_at" + sel, groupBys, orderBys := generateMysqlTimeRangeClauses(timeRangeColumn, options.GroupByPeriod) + + switch options.SegmentBy { + case "symbol": + sel = append(sel, "symbol") + groupBys = append([]string{"symbol"}, groupBys...) + orderBys = append(orderBys, "symbol") + case "exchange": + sel = append(sel, "exchange") + groupBys = append([]string{"exchange"}, groupBys...) 
+ orderBys = append(orderBys, "exchange") + } + + sel = append(sel, "SUM(quantity * price) AS quote_volume") + where := []string{timeRangeColumn + " > :start_time"} + sql := `SELECT ` + strings.Join(sel, ", ") + ` FROM trades` + + ` WHERE ` + strings.Join(where, " AND ") + + ` GROUP BY ` + strings.Join(groupBys, ", ") + + ` ORDER BY ` + strings.Join(orderBys, ", ") + + return sql +} + // QueryLast queries the last trade from the database -func (s *TradeService) QueryLast(ex types.ExchangeName, symbol string, isMargin bool, isIsolated bool) (*types.Trade, error) { - log.Infof("querying last trade exchange = %s AND symbol = %s AND is_margin = %v AND is_isolated = %v", ex, symbol, isMargin, isIsolated) +func (s *TradeService) QueryLast(ex types.ExchangeName, symbol string, isMargin, isFutures, isIsolated bool, limit int) ([]types.Trade, error) { + log.Debugf("querying last trade exchange = %s AND symbol = %s AND is_margin = %v AND is_futures = %v AND is_isolated = %v", ex, symbol, isMargin, isFutures, isIsolated) - rows, err := s.DB.NamedQuery(`SELECT * FROM trades WHERE exchange = :exchange AND symbol = :symbol AND is_margin = :is_margin AND is_isolated = :is_isolated ORDER BY gid DESC LIMIT 1`, map[string]interface{}{ + sql := "SELECT * FROM trades WHERE exchange = :exchange AND symbol = :symbol AND is_margin = :is_margin AND is_futures = :is_futures AND is_isolated = :is_isolated ORDER BY traded_at DESC LIMIT :limit" + rows, err := s.DB.NamedQuery(sql, map[string]interface{}{ "symbol": symbol, "exchange": ex, "is_margin": isMargin, + "is_futures": isFutures, "is_isolated": isIsolated, + "limit": limit, }) if err != nil { return nil, errors.Wrap(err, "query last trade error") } - if rows.Err() != nil { - return nil, rows.Err() - } - defer rows.Close() - if rows.Next() { - var trade types.Trade - err = rows.StructScan(&trade) - return &trade, err + trades, err := s.scanRows(rows) + if err != nil { + return nil, err } - return nil, rows.Err() + trades = types.SortTradesAscending(trades) + return trades, nil } func (s *TradeService) QueryForTradingFeeCurrency(ex types.ExchangeName, symbol string, feeCurrency string) ([]types.Trade, error) { - rows, err := s.DB.NamedQuery(`SELECT * FROM trades WHERE exchange = :exchange AND (symbol = :symbol OR fee_currency = :fee_currency) ORDER BY traded_at ASC`, map[string]interface{}{ + sql := "SELECT * FROM trades WHERE exchange = :exchange AND (symbol = :symbol OR fee_currency = :fee_currency) ORDER BY traded_at ASC" + rows, err := s.DB.NamedQuery(sql, map[string]interface{}{ "exchange": ex, "symbol": symbol, "fee_currency": feeCurrency, @@ -60,11 +294,14 @@ func (s *TradeService) QueryForTradingFeeCurrency(ex types.ExchangeName, symbol return s.scanRows(rows) } -func (s *TradeService) Query(ex types.ExchangeName, symbol string) ([]types.Trade, error) { - rows, err := s.DB.NamedQuery(`SELECT * FROM trades WHERE exchange = :exchange AND symbol = :symbol ORDER BY gid ASC`, map[string]interface{}{ - "exchange": ex, - "symbol": symbol, - }) +func (s *TradeService) Query(options QueryTradesOptions) ([]types.Trade, error) { + sql := queryTradesSQL(options) + args := map[string]interface{}{ + "exchange": options.Exchange, + "symbol": options.Symbol, + } + + rows, err := s.DB.NamedQuery(sql, args) if err != nil { return nil, err } @@ -74,6 +311,109 @@ func (s *TradeService) Query(ex types.ExchangeName, symbol string) ([]types.Trad return s.scanRows(rows) } +func (s *TradeService) Load(ctx context.Context, id int64) (*types.Trade, error) { + var trade types.Trade 
+ + rows, err := s.DB.NamedQuery("SELECT * FROM trades WHERE id = :id", map[string]interface{}{ + "id": id, + }) + if err != nil { + return nil, err + } + + defer rows.Close() + + if rows.Next() { + err = rows.StructScan(&trade) + return &trade, err + } + + return nil, errors.Wrapf(ErrTradeNotFound, "trade id:%d not found", id) +} + +func (s *TradeService) Mark(ctx context.Context, id int64, strategyID string) error { + result, err := s.DB.NamedExecContext(ctx, "UPDATE `trades` SET `strategy` = :strategy WHERE `id` = :id", map[string]interface{}{ + "id": id, + "strategy": strategyID, + }) + if err != nil { + return err + } + + cnt, err := result.RowsAffected() + if err != nil { + return err + } + + if cnt == 0 { + return fmt.Errorf("trade id:%d not found", id) + } + + return nil +} + +func (s *TradeService) UpdatePnL(ctx context.Context, id int64, pnl float64) error { + result, err := s.DB.NamedExecContext(ctx, "UPDATE `trades` SET `pnl` = :pnl WHERE `id` = :id", map[string]interface{}{ + "id": id, + "pnl": pnl, + }) + if err != nil { + return err + } + + cnt, err := result.RowsAffected() + if err != nil { + return err + } + + if cnt == 0 { + return fmt.Errorf("trade id:%d not found", id) + } + + return nil + +} + +func queryTradesSQL(options QueryTradesOptions) string { + ordering := "ASC" + switch v := strings.ToUpper(options.Ordering); v { + case "DESC", "ASC": + ordering = v + } + + var where []string + + if options.LastGID > 0 { + switch ordering { + case "ASC": + where = append(where, "gid > :gid") + case "DESC": + where = append(where, "gid < :gid") + } + } + + if len(options.Symbol) > 0 { + where = append(where, `symbol = :symbol`) + } + + if len(options.Exchange) > 0 { + where = append(where, `exchange = :exchange`) + } + + sql := `SELECT * FROM trades` + if len(where) > 0 { + sql += ` WHERE ` + strings.Join(where, " AND ") + } + + sql += ` ORDER BY gid ` + ordering + + if options.Limit > 0 { + sql += ` LIMIT ` + strconv.Itoa(options.Limit) + } + + return sql +} + func (s *TradeService) scanRows(rows *sqlx.Rows) (trades []types.Trade, err error) { for rows.Next() { var trade types.Trade @@ -88,9 +428,52 @@ func (s *TradeService) scanRows(rows *sqlx.Rows) (trades []types.Trade, err erro } func (s *TradeService) Insert(trade types.Trade) error { - _, err := s.DB.NamedExec(` - INSERT IGNORE INTO trades (id, exchange, order_id, symbol, price, quantity, quote_quantity, side, is_buyer, is_maker, fee, fee_currency, traded_at, is_margin, is_isolated) - VALUES (:id, :exchange, :order_id, :symbol, :price, :quantity, :quote_quantity, :side, :is_buyer, :is_maker, :fee, :fee_currency, :traded_at, :is_margin, :is_isolated)`, - trade) + sql := dbCache.InsertSqlOf(trade) + _, err := s.DB.NamedExec(sql, trade) return err } + +func (s *TradeService) DeleteAll() error { + _, err := s.DB.Exec(`DELETE FROM trades`) + return err +} + +func SelectLastTrades(ex types.ExchangeName, symbol string, isMargin, isFutures, isIsolated bool, limit uint64) sq.SelectBuilder { + return sq.Select("*"). + From("trades"). + Where(sq.And{ + sq.Eq{"symbol": symbol}, + sq.Eq{"exchange": ex}, + sq.Eq{"is_margin": isMargin}, + sq.Eq{"is_futures": isFutures}, + sq.Eq{"is_isolated": isIsolated}, + }). + OrderBy("traded_at DESC"). 
+ Limit(limit) +} + +func getExchangeAttributes(exchange types.Exchange) (isMargin, isFutures, isIsolated bool, isolatedSymbol string) { + if marginExchange, ok := exchange.(types.MarginExchange); ok { + marginSettings := marginExchange.GetMarginSettings() + isMargin = marginSettings.IsMargin + if isMargin { + isIsolated = marginSettings.IsIsolatedMargin + if marginSettings.IsIsolatedMargin { + isolatedSymbol = marginSettings.IsolatedMarginSymbol + } + } + } + + if futuresExchange, ok := exchange.(types.FuturesExchange); ok { + futuresSettings := futuresExchange.GetFuturesSettings() + isFutures = futuresSettings.IsFutures + if isFutures { + isIsolated = futuresSettings.IsIsolatedFutures + if futuresSettings.IsIsolatedFutures { + isolatedSymbol = futuresSettings.IsolatedFuturesSymbol + } + } + } + + return isMargin, isFutures, isIsolated, isolatedSymbol +} diff --git a/pkg/service/trade_test.go b/pkg/service/trade_test.go new file mode 100644 index 0000000000..3188fe9daa --- /dev/null +++ b/pkg/service/trade_test.go @@ -0,0 +1,110 @@ +package service + +import ( + "context" + "testing" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func Test_tradeService(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + ctx := context.Background() + + xdb := sqlx.NewDb(db.DB, "sqlite3") + service := &TradeService{DB: xdb} + + err = service.Insert(types.Trade{ + ID: 1, + OrderID: 1, + Exchange: "binance", + Price: fixedpoint.NewFromInt(1000), + Quantity: fixedpoint.NewFromFloat(0.1), + QuoteQuantity: fixedpoint.NewFromFloat(1000.0 * 0.1), + Symbol: "BTCUSDT", + Side: "BUY", + IsBuyer: true, + Time: types.Time(time.Now()), + }) + assert.NoError(t, err) + + err = service.Mark(ctx, 1, "grid") + assert.NoError(t, err) + + tradeRecord, err := service.Load(ctx, 1) + assert.NoError(t, err) + assert.NotNil(t, tradeRecord) + assert.True(t, tradeRecord.StrategyID.Valid) + assert.Equal(t, "grid", tradeRecord.StrategyID.String) + + err = service.UpdatePnL(ctx, 1, 10.0) + assert.NoError(t, err) + + tradeRecord, err = service.Load(ctx, 1) + assert.NoError(t, err) + assert.NotNil(t, tradeRecord) + assert.True(t, tradeRecord.PnL.Valid) + assert.Equal(t, 10.0, tradeRecord.PnL.Float64) +} + +func Test_queryTradingVolumeSQL(t *testing.T) { + t.Run("group by different period", func(t *testing.T) { + o := TradingVolumeQueryOptions{ + GroupByPeriod: "month", + } + assert.Equal(t, "SELECT YEAR(traded_at) AS year, MONTH(traded_at) AS month, SUM(quantity * price) AS quote_volume FROM trades WHERE traded_at > :start_time GROUP BY MONTH(traded_at), YEAR(traded_at) ORDER BY year ASC, month ASC", generateMysqlTradingVolumeQuerySQL(o)) + + o.GroupByPeriod = "year" + assert.Equal(t, "SELECT YEAR(traded_at) AS year, SUM(quantity * price) AS quote_volume FROM trades WHERE traded_at > :start_time GROUP BY YEAR(traded_at) ORDER BY year ASC", generateMysqlTradingVolumeQuerySQL(o)) + + expectedDefaultSQL := "SELECT YEAR(traded_at) AS year, MONTH(traded_at) AS month, DAY(traded_at) AS day, SUM(quantity * price) AS quote_volume FROM trades WHERE traded_at > :start_time GROUP BY DAY(traded_at), MONTH(traded_at), YEAR(traded_at) ORDER BY year ASC, month ASC, day ASC" + for _, s := range []string{"", "day"} { + o.GroupByPeriod = s + assert.Equal(t, expectedDefaultSQL, generateMysqlTradingVolumeQuerySQL(o)) + } + }) + +} + +func Test_queryTradesSQL(t *testing.T) { + t.Run("generate order 
by clause by Ordering option", func(t *testing.T) { + assert.Equal(t, "SELECT * FROM trades ORDER BY gid ASC LIMIT 500", queryTradesSQL(QueryTradesOptions{Limit: 500})) + assert.Equal(t, "SELECT * FROM trades ORDER BY gid ASC LIMIT 500", queryTradesSQL(QueryTradesOptions{Ordering: "ASC", Limit: 500})) + assert.Equal(t, "SELECT * FROM trades ORDER BY gid DESC LIMIT 500", queryTradesSQL(QueryTradesOptions{Ordering: "DESC", Limit: 500})) + }) + + t.Run("filter by exchange name", func(t *testing.T) { + assert.Equal(t, "SELECT * FROM trades WHERE exchange = :exchange ORDER BY gid ASC LIMIT 500", queryTradesSQL(QueryTradesOptions{Exchange: "max", Limit: 500})) + }) + + t.Run("filter by symbol", func(t *testing.T) { + assert.Equal(t, "SELECT * FROM trades WHERE symbol = :symbol ORDER BY gid ASC LIMIT 500", queryTradesSQL(QueryTradesOptions{Symbol: "eth", Limit: 500})) + }) + + t.Run("GID ordering", func(t *testing.T) { + assert.Equal(t, "SELECT * FROM trades WHERE gid > :gid ORDER BY gid ASC LIMIT 500", queryTradesSQL(QueryTradesOptions{LastGID: 1, Limit: 500})) + assert.Equal(t, "SELECT * FROM trades WHERE gid > :gid ORDER BY gid ASC LIMIT 500", queryTradesSQL(QueryTradesOptions{LastGID: 1, Ordering: "ASC", Limit: 500})) + assert.Equal(t, "SELECT * FROM trades WHERE gid < :gid ORDER BY gid DESC LIMIT 500", queryTradesSQL(QueryTradesOptions{LastGID: 1, Ordering: "DESC", Limit: 500})) + }) + + t.Run("convert all options", func(t *testing.T) { + assert.Equal(t, "SELECT * FROM trades WHERE gid < :gid AND symbol = :symbol AND exchange = :exchange ORDER BY gid DESC LIMIT 500", queryTradesSQL(QueryTradesOptions{ + Exchange: "max", + Symbol: "btc", + LastGID: 123, + Ordering: "DESC", + Limit: 500, + })) + }) +} diff --git a/pkg/service/withdraw.go b/pkg/service/withdraw.go new file mode 100644 index 0000000000..af0a0df837 --- /dev/null +++ b/pkg/service/withdraw.go @@ -0,0 +1,130 @@ +package service + +import ( + "context" + "time" + + sq "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + + "github.com/c9s/bbgo/pkg/exchange/batch" + "github.com/c9s/bbgo/pkg/types" +) + +type WithdrawService struct { + DB *sqlx.DB +} + +// Sync syncs the withdrawal records into db +func (s *WithdrawService) Sync(ctx context.Context, ex types.Exchange, startTime time.Time) error { + isMargin, isFutures, isIsolated, _ := getExchangeAttributes(ex) + if isMargin || isFutures || isIsolated { + // only works in spot + return nil + } + + transferApi, ok := ex.(types.ExchangeTransferService) + if !ok { + return nil + } + + tasks := []SyncTask{ + { + Type: types.Withdraw{}, + Select: SelectLastWithdraws(ex.Name(), 100), + BatchQuery: func(ctx context.Context, startTime, endTime time.Time) (interface{}, chan error) { + query := &batch.WithdrawBatchQuery{ + ExchangeTransferService: transferApi, + } + return query.Query(ctx, "", startTime, endTime) + }, + Time: func(obj interface{}) time.Time { + return obj.(types.Withdraw).ApplyTime.Time() + }, + ID: func(obj interface{}) string { + withdraw := obj.(types.Withdraw) + return withdraw.TransactionID + }, + Filter: func(obj interface{}) bool { + withdraw := obj.(types.Withdraw) + if withdraw.Status == "rejected" { + return false + } + + if len(withdraw.TransactionID) == 0 { + return false + } + + return true + }, + LogInsert: true, + }, + } + + for _, sel := range tasks { + if err := sel.execute(ctx, s.DB, startTime); err != nil { + return err + } + } + + return nil +} + +func SelectLastWithdraws(ex types.ExchangeName, limit uint64) sq.SelectBuilder { + return 
sq.Select("*"). + From("withdraws"). + Where(sq.And{ + sq.Eq{"exchange": ex}, + }). + OrderBy("time DESC"). + Limit(limit) +} + +func (s *WithdrawService) QueryLast(ex types.ExchangeName, limit int) ([]types.Withdraw, error) { + sql := "SELECT * FROM `withdraws` WHERE `exchange` = :exchange ORDER BY `time` DESC LIMIT :limit" + rows, err := s.DB.NamedQuery(sql, map[string]interface{}{ + "exchange": ex, + "limit": limit, + }) + if err != nil { + return nil, err + } + + defer rows.Close() + return s.scanRows(rows) +} + +func (s *WithdrawService) Query(exchangeName types.ExchangeName) ([]types.Withdraw, error) { + args := map[string]interface{}{ + "exchange": exchangeName, + } + sql := "SELECT * FROM `withdraws` WHERE `exchange` = :exchange ORDER BY `time` ASC" + rows, err := s.DB.NamedQuery(sql, args) + if err != nil { + return nil, err + } + + defer rows.Close() + + return s.scanRows(rows) +} + +func (s *WithdrawService) scanRows(rows *sqlx.Rows) (withdraws []types.Withdraw, err error) { + for rows.Next() { + var withdraw types.Withdraw + if err := rows.StructScan(&withdraw); err != nil { + return withdraws, err + } + + withdraws = append(withdraws, withdraw) + } + + return withdraws, rows.Err() +} + +func (s *WithdrawService) Insert(withdrawal types.Withdraw) error { + sql := `INSERT INTO withdraws (exchange, asset, network, address, amount, txn_id, txn_fee, time) + VALUES (:exchange, :asset, :network, :address, :amount, :txn_id, :txn_fee, :time)` + _, err := s.DB.NamedExec(sql, withdrawal) + return err +} diff --git a/pkg/service/withdraw_test.go b/pkg/service/withdraw_test.go new file mode 100644 index 0000000000..3328a0eee3 --- /dev/null +++ b/pkg/service/withdraw_test.go @@ -0,0 +1,41 @@ +package service + +import ( + "testing" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +func TestWithdrawService(t *testing.T) { + db, err := prepareDB(t) + if err != nil { + t.Fatal(err) + } + + defer db.Close() + + xdb := sqlx.NewDb(db.DB, "sqlite3") + service := &WithdrawService{DB: xdb} + + err = service.Insert(types.Withdraw{ + Exchange: types.ExchangeMax, + Asset: "BTC", + Amount: fixedpoint.NewFromFloat(0.0001), + Address: "test", + TransactionID: "01", + TransactionFee: fixedpoint.NewFromFloat(0.0001), + Network: "omni", + ApplyTime: types.Time(time.Now()), + }) + assert.NoError(t, err) + + withdraws, err := service.Query(types.ExchangeMax) + assert.NoError(t, err) + assert.NotEmpty(t, withdraws) + assert.Equal(t, types.ExchangeMax, withdraws[0].Exchange) +} diff --git a/pkg/slack/slacklog/logrus_look.go b/pkg/slack/slacklog/logrus_look.go index 5d56375b35..e4349a021e 100644 --- a/pkg/slack/slacklog/logrus_look.go +++ b/pkg/slack/slacklog/logrus_look.go @@ -40,7 +40,7 @@ func (t *LogHook) Fire(e *logrus.Entry) error { return nil } - var color = "" + var color string switch e.Level { case logrus.DebugLevel: diff --git a/pkg/slack/slackstyle/style.go b/pkg/slack/slackstyle/style.go index 1f3fca63e6..46914aa8e9 100644 --- a/pkg/slack/slackstyle/style.go +++ b/pkg/slack/slackstyle/style.go @@ -1,8 +1,14 @@ package slackstyle +// Green is the green hex color const Green = "#228B22" + +// Red is the red hex color const Red = "#800000" +// TrendIcon returns the slack emoji of trends +// 1: uptrend +// -1: downtrend func TrendIcon(trend int) string { if trend < 0 { return ":chart_with_downwards_trend:" diff --git a/pkg/strategy/autoborrow/strategy.go b/pkg/strategy/autoborrow/strategy.go new 
file mode 100644 index 0000000000..77b65f6a24 --- /dev/null +++ b/pkg/strategy/autoborrow/strategy.go @@ -0,0 +1,392 @@ +package autoborrow + +import ( + "context" + "fmt" + "time" + + "github.com/sirupsen/logrus" + "github.com/slack-go/slack" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/exchange/binance" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "autoborrow" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +/** +- on: binance + autoborrow: + interval: 30m + repayWhenDeposit: true + + # minMarginLevel for triggering auto borrow + minMarginLevel: 1.5 + assets: + - asset: ETH + low: 3.0 + maxQuantityPerBorrow: 1.0 + maxTotalBorrow: 10.0 + - asset: USDT + low: 1000.0 + maxQuantityPerBorrow: 100.0 + maxTotalBorrow: 10.0 +*/ + +type MarginAsset struct { + Asset string `json:"asset"` + Low fixedpoint.Value `json:"low"` + MaxTotalBorrow fixedpoint.Value `json:"maxTotalBorrow"` + MaxQuantityPerBorrow fixedpoint.Value `json:"maxQuantityPerBorrow"` + MinQuantityPerBorrow fixedpoint.Value `json:"minQuantityPerBorrow"` +} + +type Strategy struct { + Interval types.Interval `json:"interval"` + MinMarginLevel fixedpoint.Value `json:"minMarginLevel"` + MaxMarginLevel fixedpoint.Value `json:"maxMarginLevel"` + AutoRepayWhenDeposit bool `json:"autoRepayWhenDeposit"` + + Assets []MarginAsset `json:"assets"` + + ExchangeSession *bbgo.ExchangeSession + + marginBorrowRepay types.MarginBorrowRepayService +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + // session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"}) +} + +func (s *Strategy) tryToRepayAnyDebt(ctx context.Context) { + log.Infof("trying to repay any debt...") + + account, err := s.ExchangeSession.UpdateAccount(ctx) + if err != nil { + log.WithError(err).Errorf("can not update account") + return + } + + minMarginLevel := s.MinMarginLevel + curMarginLevel := account.MarginLevel + + balances := account.Balances() + for _, b := range balances { + if b.Borrowed.Sign() <= 0 { + continue + } + + if b.Available.IsZero() { + continue + } + + toRepay := b.Available + bbgo.Notify(&MarginAction{ + Exchange: s.ExchangeSession.ExchangeName, + Action: "Repay", + Asset: b.Currency, + Amount: toRepay, + MarginLevel: curMarginLevel, + MinMarginLevel: minMarginLevel, + }) + + log.Infof("repaying %f %s", toRepay.Float64(), b.Currency) + if err := s.marginBorrowRepay.RepayMarginAsset(context.Background(), b.Currency, toRepay); err != nil { + log.WithError(err).Errorf("margin repay error") + } + } +} + +func (s *Strategy) checkAndBorrow(ctx context.Context) { + if s.MinMarginLevel.IsZero() { + return + } + + account, err := s.ExchangeSession.UpdateAccount(ctx) + if err != nil { + log.WithError(err).Errorf("can not update account") + return + } + + minMarginLevel := s.MinMarginLevel + curMarginLevel := account.MarginLevel + + log.Infof("current account margin level: %s margin ratio: %s, margin tolerance: %s", + account.MarginLevel.String(), + account.MarginRatio.String(), + account.MarginTolerance.String(), + ) + + // if margin ratio is too low, do not borrow + if curMarginLevel.Compare(minMarginLevel) < 0 { + log.Infof("current margin level %f < min margin level %f, skip autoborrow", curMarginLevel.Float64(), minMarginLevel.Float64()) + bbgo.Notify("Warning!!! 
%s Current Margin Level %f < Minimal Margin Level %f", + s.ExchangeSession.Name, + curMarginLevel.Float64(), + minMarginLevel.Float64(), + account.Balances().Debts(), + ) + s.tryToRepayAnyDebt(ctx) + return + } + + balances := account.Balances() + if len(balances) == 0 { + log.Warn("balance is empty, skip autoborrow") + return + } + + for _, marginAsset := range s.Assets { + changed := false + + if marginAsset.Low.IsZero() { + log.Warnf("margin asset low balance is not set: %+v", marginAsset) + continue + } + + b, ok := balances[marginAsset.Asset] + if ok { + toBorrow := marginAsset.Low.Sub(b.Total()) + if toBorrow.Sign() < 0 { + log.Infof("balance %f > low %f. no need to borrow asset %+v", + b.Total().Float64(), + marginAsset.Low.Float64(), + marginAsset) + continue + } + + if !marginAsset.MaxQuantityPerBorrow.IsZero() { + toBorrow = fixedpoint.Min(toBorrow, marginAsset.MaxQuantityPerBorrow) + } + + if !marginAsset.MaxTotalBorrow.IsZero() { + // check if we over borrow + newBorrow := toBorrow.Add(b.Borrowed) + if newBorrow.Compare(marginAsset.MaxTotalBorrow) > 0 { + toBorrow = toBorrow.Sub(newBorrow.Sub(marginAsset.MaxTotalBorrow)) + if toBorrow.Sign() < 0 { + log.Warnf("margin asset %s is over borrowed, skip", marginAsset.Asset) + continue + } + } + } + + if toBorrow.IsZero() { + continue + } + + bbgo.Notify(&MarginAction{ + Exchange: s.ExchangeSession.ExchangeName, + Action: "Borrow", + Asset: marginAsset.Asset, + Amount: toBorrow, + MarginLevel: curMarginLevel, + MinMarginLevel: minMarginLevel, + }) + log.Infof("sending borrow request %f %s", toBorrow.Float64(), marginAsset.Asset) + if err := s.marginBorrowRepay.BorrowMarginAsset(ctx, marginAsset.Asset, toBorrow); err != nil { + log.WithError(err).Errorf("borrow error") + continue + } + changed = true + } else { + // available balance is less than marginAsset.Low, we should trigger borrow + toBorrow := marginAsset.Low + + if !marginAsset.MaxQuantityPerBorrow.IsZero() { + toBorrow = fixedpoint.Min(toBorrow, marginAsset.MaxQuantityPerBorrow) + } + + if toBorrow.IsZero() { + continue + } + + bbgo.Notify(&MarginAction{ + Exchange: s.ExchangeSession.ExchangeName, + Action: "Borrow", + Asset: marginAsset.Asset, + Amount: toBorrow, + MarginLevel: curMarginLevel, + MinMarginLevel: minMarginLevel, + }) + + log.Infof("sending borrow request %f %s", toBorrow.Float64(), marginAsset.Asset) + if err := s.marginBorrowRepay.BorrowMarginAsset(ctx, marginAsset.Asset, toBorrow); err != nil { + log.WithError(err).Errorf("borrow error") + continue + } + + changed = true + } + + // if debt is changed, we need to update account + if changed { + account, err = s.ExchangeSession.UpdateAccount(ctx) + if err != nil { + log.WithError(err).Errorf("can not update account") + return + } + } + } +} + +func (s *Strategy) run(ctx context.Context, interval time.Duration) { + ticker := time.NewTicker(interval) + defer ticker.Stop() + + s.checkAndBorrow(ctx) + for { + select { + case <-ctx.Done(): + return + + case <-ticker.C: + s.checkAndBorrow(ctx) + + } + } +} + +func (s *Strategy) handleBalanceUpdate(balances types.BalanceMap) { + if s.MinMarginLevel.IsZero() { + return + } + + if s.ExchangeSession.GetAccount().MarginLevel.Compare(s.MinMarginLevel) > 0 { + return + } + + for _, b := range balances { + if b.Available.IsZero() && b.Borrowed.IsZero() { + continue + } + } +} + +func (s *Strategy) handleBinanceBalanceUpdateEvent(event *binance.BalanceUpdateEvent) { + if s.MinMarginLevel.IsZero() { + return + } + + if 
s.ExchangeSession.GetAccount().MarginLevel.Compare(s.MinMarginLevel) > 0 { + return + } + + delta := fixedpoint.MustNewFromString(event.Delta) + + // ignore outflow + if delta.Sign() < 0 { + return + } + + account := s.ExchangeSession.GetAccount() + minMarginLevel := s.MinMarginLevel + curMarginLevel := account.MarginLevel + + if b, ok := account.Balance(event.Asset); ok { + if b.Available.IsZero() || b.Borrowed.IsZero() { + return + } + + toRepay := b.Available + bbgo.Notify(&MarginAction{ + Exchange: s.ExchangeSession.ExchangeName, + Action: "Repay", + Asset: b.Currency, + Amount: toRepay, + MarginLevel: curMarginLevel, + MinMarginLevel: minMarginLevel, + }) + if err := s.marginBorrowRepay.RepayMarginAsset(context.Background(), event.Asset, toRepay); err != nil { + log.WithError(err).Errorf("margin repay error") + } + } +} + +type MarginAction struct { + Exchange types.ExchangeName `json:"exchange"` + Action string `json:"action"` + Asset string `json:"asset"` + Amount fixedpoint.Value `json:"amount"` + MarginLevel fixedpoint.Value `json:"marginLevel"` + MinMarginLevel fixedpoint.Value `json:"minMarginLevel"` +} + +func (a *MarginAction) SlackAttachment() slack.Attachment { + return slack.Attachment{ + Title: fmt.Sprintf("%s %s %s", a.Action, a.Amount, a.Asset), + Color: "warning", + Fields: []slack.AttachmentField{ + { + Title: "Exchange", + Value: a.Exchange.String(), + Short: true, + }, + { + Title: "Action", + Value: a.Action, + Short: true, + }, + { + Title: "Asset", + Value: a.Asset, + Short: true, + }, + { + Title: "Amount", + Value: a.Amount.String(), + Short: true, + }, + { + Title: "Current Margin Level", + Value: a.MarginLevel.String(), + Short: true, + }, + { + Title: "Min Margin Level", + Value: a.MinMarginLevel.String(), + Short: true, + }, + }, + } +} + +// This strategy simply spent all available quote currency to buy the symbol whenever kline gets closed +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + if s.MinMarginLevel.IsZero() { + log.Warnf("minMarginLevel is 0, you should configure this minimal margin ratio for controlling the liquidation risk") + } + + s.ExchangeSession = session + + marginBorrowRepay, ok := session.Exchange.(types.MarginBorrowRepayService) + if !ok { + return fmt.Errorf("exchange %s does not implement types.MarginBorrowRepayService", session.ExchangeName) + } + + s.marginBorrowRepay = marginBorrowRepay + + if s.AutoRepayWhenDeposit { + binanceStream, ok := session.UserDataStream.(*binance.Stream) + if ok { + binanceStream.OnBalanceUpdateEvent(s.handleBinanceBalanceUpdateEvent) + } else { + session.UserDataStream.OnBalanceUpdate(s.handleBalanceUpdate) + } + } + + go s.run(ctx, s.Interval.Duration()) + return nil +} diff --git a/pkg/strategy/bollgrid/strategy.go b/pkg/strategy/bollgrid/strategy.go index f2c5b5f848..8d689f7d47 100644 --- a/pkg/strategy/bollgrid/strategy.go +++ b/pkg/strategy/bollgrid/strategy.go @@ -2,6 +2,7 @@ package bollgrid import ( "context" + "fmt" "sync" "github.com/sirupsen/logrus" @@ -12,20 +13,18 @@ import ( "github.com/c9s/bbgo/pkg/types" ) -var log = logrus.WithField("strategy", "bollgrid") +const ID = "bollgrid" + +var log = logrus.WithField("strategy", ID) func init() { // Register the pointer of the strategy struct, // so that bbgo knows what struct to be used to unmarshal the configs (YAML or JSON) // Note: built-in strategies need to imported manually in the bbgo cmd package. 
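The borrow sizing in checkAndBorrow above folds three limits together: the per-asset low-water mark (`low`), the per-request cap (`maxQuantityPerBorrow`), and the total-borrow ceiling (`maxTotalBorrow`). A minimal sketch of that arithmetic, assuming plain float64 values instead of fixedpoint.Value; the computeBorrow helper is illustrative and not part of bbgo:

```go
package main

import "fmt"

// computeBorrow mirrors the clamping done in checkAndBorrow above, with plain
// float64 in place of fixedpoint.Value. A zero cap means "no cap", matching
// the strategy's IsZero checks.
func computeBorrow(total, borrowed, low, maxPerBorrow, maxTotalBorrow float64) float64 {
	toBorrow := low - total
	if toBorrow <= 0 {
		return 0 // balance is already above the low-water mark
	}
	if maxPerBorrow > 0 && toBorrow > maxPerBorrow {
		toBorrow = maxPerBorrow // cap a single borrow request
	}
	if maxTotalBorrow > 0 {
		if newBorrow := borrowed + toBorrow; newBorrow > maxTotalBorrow {
			// shrink the request so the total stays under the ceiling
			toBorrow -= newBorrow - maxTotalBorrow
		}
	}
	if toBorrow <= 0 {
		return 0 // already at or over the total-borrow ceiling
	}
	return toBorrow
}

func main() {
	// ETH: holding 1.2, borrowed 9.5, low 3.0, per-borrow cap 1.0, total cap 10.0
	fmt.Println(computeBorrow(1.2, 9.5, 3.0, 1.0, 10.0)) // 0.5
}
```

With these example numbers the 1.8 shortfall is first capped to 1.0 per request and then trimmed to 0.5 so the total borrow stays at the 10.0 ceiling.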
- bbgo.RegisterStrategy("bollgrid", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) } type Strategy struct { - // The notification system will be injected into the strategy automatically. - // This field will be injected automatically since it's a single exchange strategy. - *bbgo.Notifiability - // OrderExecutor is an interface for submitting order. // This field will be injected automatically since it's a single exchange strategy. bbgo.OrderExecutor @@ -68,195 +67,208 @@ type Strategy struct { GridNum int `json:"gridNumber"` // Quantity is the quantity you want to submit for each order. - Quantity float64 `json:"quantity"` + Quantity fixedpoint.Value `json:"quantity"` // activeOrders is the locally maintained active order book of the maker orders. - activeOrders *bbgo.LocalActiveOrderBook + activeOrders *bbgo.ActiveOrderBook - profitOrders *bbgo.LocalActiveOrderBook + profitOrders *bbgo.ActiveOrderBook orders *bbgo.OrderStore // boll is the BOLLINGER indicator we used for predicting the price. boll *indicator.BOLL -} -func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { - // currently we need the 1m kline to update the last close price and indicators - session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval.String()}) + CancelProfitOrdersOnShutdown bool `json: "shutdownCancelProfitOrders"` } -func (s *Strategy) updateBidOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { - quoteCurrency := s.Market.QuoteCurrency - balances := session.Account.Balances() +func (s *Strategy) ID() string { + return ID +} - balance, ok := balances[quoteCurrency] - if !ok || balance.Available <= 0 { - return +func (s *Strategy) Validate() error { + if s.ProfitSpread.Sign() <= 0 { + // If profitSpread is empty or its value is negative + return fmt.Errorf("profit spread should bigger than 0") } - - var downBand = s.boll.LastDownBand() - if downBand <= 0.0 { - return + if s.Quantity.Sign() <= 0 { + // If quantity is empty or its value is negative + return fmt.Errorf("quantity should bigger than 0") } + return nil +} - var startPrice = downBand +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + if s.Interval == "" { + panic("bollgrid interval can not be empty") + } - var submitOrders []types.SubmitOrder - for i := 0; i < s.GridNum; i++ { - submitOrders = append(submitOrders, types.SubmitOrder{ - Symbol: s.Symbol, - Side: types.SideTypeBuy, - Type: types.OrderTypeLimit, - Market: s.Market, - Quantity: s.Quantity, - Price: startPrice, - TimeInForce: "GTC", - }) + // currently we need the 1m kline to update the last close price and indicators + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) - startPrice -= s.GridPips.Float64() + if len(s.RepostInterval) > 0 && s.Interval != s.RepostInterval { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.RepostInterval}) } +} - orders, err := orderExecutor.SubmitOrders(context.Background(), submitOrders...) - if err != nil { - log.WithError(err).Errorf("can not place orders") - return +func (s *Strategy) generateGridBuyOrders(session *bbgo.ExchangeSession) ([]types.SubmitOrder, error) { + balances := session.GetAccount().Balances() + quoteBalance := balances[s.Market.QuoteCurrency].Available + if quoteBalance.Sign() <= 0 { + return nil, fmt.Errorf("quote balance %s is zero: %v", s.Market.QuoteCurrency, quoteBalance) } - s.activeOrders.Add(orders...) - s.orders.Add(orders...) 
-} + upBand, downBand := s.boll.LastUpBand(), s.boll.LastDownBand() + if upBand <= 0.0 { + return nil, fmt.Errorf("up band == 0") + } + if downBand <= 0.0 { + return nil, fmt.Errorf("down band == 0") + } -func (s *Strategy) updateAskOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { - baseCurrency := s.Market.BaseCurrency - balances := session.Account.Balances() + currentPrice, ok := session.LastPrice(s.Symbol) + if !ok { + return nil, fmt.Errorf("last price not found") + } - balance, ok := balances[baseCurrency] - if !ok || balance.Available <= 0 { - return + if currentPrice.Float64() > upBand || currentPrice.Float64() < downBand { + return nil, fmt.Errorf("current price %v exceed the bollinger band %f <> %f", currentPrice, upBand, downBand) } - var upBand = s.boll.LastUpBand() - if upBand <= 0.0 { - return + ema99 := s.StandardIndicatorSet.EWMA(types.IntervalWindow{Interval: s.Interval, Window: 99}) + ema25 := s.StandardIndicatorSet.EWMA(types.IntervalWindow{Interval: s.Interval, Window: 25}) + ema7 := s.StandardIndicatorSet.EWMA(types.IntervalWindow{Interval: s.Interval, Window: 7}) + if ema7.Last() > ema25.Last()*1.001 && ema25.Last() > ema99.Last()*1.0005 { + log.Infof("all ema lines trend up, skip buy") + return nil, nil } - var startPrice = upBand + priceRange := upBand - downBand + gridSize := priceRange / float64(s.GridNum) - var submitOrders []types.SubmitOrder - for i := 0; i < s.GridNum; i++ { - submitOrders = append(submitOrders, types.SubmitOrder{ + var orders []types.SubmitOrder + for pricef := upBand; pricef >= downBand; pricef -= gridSize { + if pricef >= currentPrice.Float64() { + continue + } + price := fixedpoint.NewFromFloat(pricef) + // adjust buy quantity using current quote balance + quantity := bbgo.AdjustFloatQuantityByMaxAmount(s.Quantity, price, quoteBalance) + order := types.SubmitOrder{ Symbol: s.Symbol, - Side: types.SideTypeSell, + Side: types.SideTypeBuy, Type: types.OrderTypeLimit, Market: s.Market, - Quantity: s.Quantity, - Price: startPrice, - TimeInForce: "GTC", - }) - - startPrice += s.GridPips.Float64() - } - - orders, err := orderExecutor.SubmitOrders(context.Background(), submitOrders...) - if err != nil { - log.WithError(err).Errorf("can not place orders") - return + Quantity: quantity, + Price: price, + TimeInForce: types.TimeInForceGTC, + } + quoteQuantity := order.Quantity.Mul(price) + if quantity.Compare(s.MinQuantity) < 0 { + // don't submit this order if buy quantity is too small + log.Infof("quote balance %v is not enough, stop generating buy orders", quoteBalance) + break + } + quoteBalance = quoteBalance.Sub(quoteQuantity) + log.Infof("submitting order: %s", order.String()) + orders = append(orders, order) } - - s.orders.Add(orders...) - s.activeOrders.Add(orders...) 
+ return orders, nil } -func (s *Strategy) placeGridOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { - quoteCurrency := s.Market.QuoteCurrency - balances := session.Account.Balances() - - balance, ok := balances[quoteCurrency] - if !ok || balance.Available <= 0 { - return +func (s *Strategy) generateGridSellOrders(session *bbgo.ExchangeSession) ([]types.SubmitOrder, error) { + balances := session.GetAccount().Balances() + baseBalance := balances[s.Market.BaseCurrency].Available + if baseBalance.Sign() <= 0 { + return nil, fmt.Errorf("base balance %s is zero: %+v", s.Market.BaseCurrency, baseBalance) } - var upBand = s.boll.LastUpBand() + upBand, downBand := s.boll.LastUpBand(), s.boll.LastDownBand() if upBand <= 0.0 { - log.Warnf("up band == 0") - return + return nil, fmt.Errorf("up band == 0") } - - var downBand = s.boll.LastDownBand() if downBand <= 0.0 { - log.Warnf("down band == 0") - return + return nil, fmt.Errorf("down band == 0") } currentPrice, ok := session.LastPrice(s.Symbol) if !ok { - log.Warnf("last price not found") - return + return nil, fmt.Errorf("last price not found") } - if currentPrice > upBand || currentPrice < downBand { - log.Warnf("current price exceed the bollinger band") - return + currentPricef := currentPrice.Float64() + + if currentPricef > upBand || currentPricef < downBand { + return nil, fmt.Errorf("current price exceed the bollinger band") } ema99 := s.StandardIndicatorSet.EWMA(types.IntervalWindow{Interval: s.Interval, Window: 99}) ema25 := s.StandardIndicatorSet.EWMA(types.IntervalWindow{Interval: s.Interval, Window: 25}) ema7 := s.StandardIndicatorSet.EWMA(types.IntervalWindow{Interval: s.Interval, Window: 7}) + if ema7.Last() < ema25.Last()*(1-0.004) && ema25.Last() < ema99.Last()*(1-0.0005) { + log.Infof("all ema lines trend down, skip sell") + return nil, nil + } priceRange := upBand - downBand gridSize := priceRange / float64(s.GridNum) var orders []types.SubmitOrder - for price := downBand; price <= upBand; price += gridSize { - var side types.SideType - if price > currentPrice { - side = types.SideTypeSell - } else { - side = types.SideTypeBuy - } - - // trend up - switch side { - - case types.SideTypeBuy: - if ema7.Last() > ema25.Last()*1.001 && ema25.Last() > ema99.Last()*1.0005 { - log.Infof("all ema lines trend up, skip buy") - continue - } - - case types.SideTypeSell: - if ema7.Last() < ema25.Last()*(1-0.004) && ema25.Last() < ema99.Last()*(1-0.0005) { - log.Infof("all ema lines trend down, skip sell") - continue - } + for pricef := downBand; pricef <= upBand; pricef += gridSize { + if pricef <= currentPricef { + continue } - + price := fixedpoint.NewFromFloat(pricef) + // adjust sell quantity using current base balance + quantity := fixedpoint.Min(s.Quantity, baseBalance) order := types.SubmitOrder{ Symbol: s.Symbol, - Side: side, + Side: types.SideTypeSell, Type: types.OrderTypeLimit, Market: s.Market, - Quantity: s.Quantity, + Quantity: quantity, Price: price, - TimeInForce: "GTC", + TimeInForce: types.TimeInForceGTC, } + baseQuantity := order.Quantity + if quantity.Compare(s.MinQuantity) < 0 { + // don't submit this order if sell quantity is too small + log.Infof("base balance %s is not enough, stop generating sell orders", baseBalance) + break + } + baseBalance = baseBalance.Sub(baseQuantity) log.Infof("submitting order: %s", order.String()) orders = append(orders, order) } + return orders, nil +} - createdOrders, err := orderExecutor.SubmitOrders(context.Background(), orders...) 
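Both generateGridBuyOrders and generateGridSellOrders above derive their price levels the same way: the distance between the Bollinger bands is divided into GridNum steps, and levels on the wrong side of the current price are skipped. A standalone sketch under those assumptions, with plain float64 prices and without the balance checks or EMA trend filters:

```go
package main

import "fmt"

// gridLevels derives the grid prices the same way the two generators above do:
// split the band into gridNum steps and keep only the levels on the proper
// side of the current price.
func gridLevels(upBand, downBand, current float64, gridNum int) (buys, sells []float64) {
	gridSize := (upBand - downBand) / float64(gridNum)
	for p := upBand; p >= downBand; p -= gridSize {
		if p < current {
			buys = append(buys, p) // buy levels sit below the current price
		}
	}
	for p := downBand; p <= upBand; p += gridSize {
		if p > current {
			sells = append(sells, p) // sell levels sit above the current price
		}
	}
	return buys, sells
}

func main() {
	buys, sells := gridLevels(110, 90, 101, 4) // gridSize = 5
	fmt.Println("buy levels:", buys)   // [100 95 90]
	fmt.Println("sell levels:", sells) // [105 110]
}
```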
+func (s *Strategy) placeGridOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { + sellOrders, err := s.generateGridSellOrders(session) if err != nil { - log.WithError(err).Errorf("can not place orders") - return + log.Warn(err.Error()) + } + createdSellOrders, err := orderExecutor.SubmitOrders(context.Background(), sellOrders...) + if err != nil { + log.WithError(err).Errorf("can not place sell orders") + } + + buyOrders, err := s.generateGridBuyOrders(session) + if err != nil { + log.Warn(err.Error()) + } + createdBuyOrders, err := orderExecutor.SubmitOrders(context.Background(), buyOrders...) + if err != nil { + log.WithError(err).Errorf("can not place buy orders") } + createdOrders := append(createdSellOrders, createdBuyOrders...) s.activeOrders.Add(createdOrders...) s.orders.Add(createdOrders...) } func (s *Strategy) updateOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { - if err := session.Exchange.CancelOrders(context.Background(), s.activeOrders.Orders()...); err != nil { + if err := orderExecutor.CancelOrders(context.Background(), s.activeOrders.Orders()...); err != nil { log.WithError(err).Errorf("cancel order error") } @@ -271,26 +283,32 @@ func (s *Strategy) updateOrders(orderExecutor bbgo.OrderExecutor, session *bbgo. s.activeOrders.Print() } -func (s *Strategy) submitReverseOrder(order types.Order) { +func (s *Strategy) submitReverseOrder(order types.Order, session *bbgo.ExchangeSession) { + balances := session.GetAccount().Balances() + var side = order.Side.Reverse() var price = order.Price + var quantity = order.Quantity switch side { case types.SideTypeSell: - price += s.ProfitSpread.Float64() + price = price.Add(s.ProfitSpread) + maxQuantity := balances[s.Market.BaseCurrency].Available + quantity = fixedpoint.Min(quantity, maxQuantity) case types.SideTypeBuy: - price -= s.ProfitSpread.Float64() - + price = price.Sub(s.ProfitSpread) + maxQuantity := balances[s.Market.QuoteCurrency].Available.Div(price) + quantity = fixedpoint.Min(quantity, maxQuantity) } submitOrder := types.SubmitOrder{ Symbol: s.Symbol, Side: side, Type: types.OrderTypeLimit, - Quantity: order.Quantity, + Quantity: quantity, Price: price, - TimeInForce: "GTC", + TimeInForce: types.TimeInForceGTC, } log.Infof("submitting reverse order: %s against %s", submitOrder.String(), order.String()) @@ -316,20 +334,20 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se }, 2.0) s.orders = bbgo.NewOrderStore(s.Symbol) - s.orders.BindStream(session.Stream) + s.orders.BindStream(session.UserDataStream) // we don't persist orders so that we can not clear the previous orders for now. just need time to support this. - s.activeOrders = bbgo.NewLocalActiveOrderBook() + s.activeOrders = bbgo.NewActiveOrderBook(s.Symbol) s.activeOrders.OnFilled(func(o types.Order) { - s.submitReverseOrder(o) + s.submitReverseOrder(o, session) }) - s.activeOrders.BindStream(session.Stream) + s.activeOrders.BindStream(session.UserDataStream) - s.profitOrders = bbgo.NewLocalActiveOrderBook() + s.profitOrders = bbgo.NewActiveOrderBook(s.Symbol) s.profitOrders.OnFilled(func(o types.Order) { // we made profit here! 
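submitReverseOrder above answers every filled grid order with its mirror: the opposite side, the fill price shifted by ProfitSpread, and the quantity capped by the balance actually available. A simplified float64 sketch of that derivation; the reverseOrder helper and its parameter names are illustrative only:

```go
package main

import "fmt"

type side string

const (
	buy  side = "BUY"
	sell side = "SELL"
)

// reverseOrder mirrors the logic of submitReverseOrder above: the reverse
// side's price is offset by the profit spread and the quantity is capped by
// the balance that can actually cover it.
func reverseOrder(filledSide side, filledPrice, filledQty, profitSpread, baseAvail, quoteAvail float64) (side, float64, float64) {
	if filledSide == buy {
		// a filled buy is answered with a sell above it, capped by the base balance
		return sell, filledPrice + profitSpread, minFloat(filledQty, baseAvail)
	}
	// a filled sell is answered with a buy below it, capped by what the quote
	// balance can afford at the reduced price
	price := filledPrice - profitSpread
	return buy, price, minFloat(filledQty, quoteAvail/price)
}

func minFloat(a, b float64) float64 {
	if a < b {
		return a
	}
	return b
}

func main() {
	s, price, qty := reverseOrder(buy, 100.0, 0.5, 1.0, 0.4, 1000.0)
	fmt.Println(s, price, qty) // SELL 101 0.4 — quantity capped by the base balance
}
```

Capping by the available balance keeps the reverse order within what the account can still fill rather than re-submitting the original quantity blindly.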
}) - s.profitOrders.BindStream(session.Stream) + s.profitOrders.BindStream(session.UserDataStream) // setup graceful shutting down handler s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { @@ -337,22 +355,27 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se defer wg.Done() log.Infof("canceling active orders...") - if err := session.Exchange.CancelOrders(ctx, s.activeOrders.Orders()...); err != nil { + if err := orderExecutor.CancelOrders(ctx, s.activeOrders.Orders()...); err != nil { log.WithError(err).Errorf("cancel order error") } - if err := session.Exchange.CancelOrders(ctx, s.profitOrders.Orders()...); err != nil { - log.WithError(err).Errorf("cancel order error") + if s.CancelProfitOrdersOnShutdown { + log.Infof("canceling profit orders...") + err := orderExecutor.CancelOrders(ctx, s.profitOrders.Orders()...) + + if err != nil { + log.WithError(err).Errorf("cancel profit order error") + } } }) - session.Stream.OnConnect(func() { + session.UserDataStream.OnStart(func() { log.Infof("connected, submitting the first round of the orders") s.updateOrders(orderExecutor, session) }) // avoid using time ticker since we will need back testing here - session.Stream.OnKLineClosed(func(kline types.KLine) { + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { // skip kline events that does not belong to this symbol if kline.Symbol != s.Symbol { log.Infof("%s != %s", kline.Symbol, s.Symbol) diff --git a/pkg/strategy/bollmaker/dynamic_spread.go b/pkg/strategy/bollmaker/dynamic_spread.go new file mode 100644 index 0000000000..ed18b72cfb --- /dev/null +++ b/pkg/strategy/bollmaker/dynamic_spread.go @@ -0,0 +1,84 @@ +package bollmaker + +import ( + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +type DynamicSpreadSettings struct { + Enabled bool `json:"enabled"` + + // Window is the window of the SMAs of spreads + Window int `json:"window"` + + // AskSpreadScale is used to define the ask spread range with the given percentage. + AskSpreadScale *bbgo.PercentageScale `json:"askSpreadScale"` + + // BidSpreadScale is used to define the bid spread range with the given percentage. 
+ BidSpreadScale *bbgo.PercentageScale `json:"bidSpreadScale"` + + DynamicAskSpread *indicator.SMA + DynamicBidSpread *indicator.SMA +} + +// Update dynamic spreads +func (ds *DynamicSpreadSettings) Update(kline types.KLine) { + if !ds.Enabled { + return + } + + ampl := (kline.GetHigh().Float64() - kline.GetLow().Float64()) / kline.GetOpen().Float64() + + switch kline.Direction() { + case types.DirectionUp: + ds.DynamicAskSpread.Update(ampl) + ds.DynamicBidSpread.Update(0) + case types.DirectionDown: + ds.DynamicBidSpread.Update(ampl) + ds.DynamicAskSpread.Update(0) + default: + ds.DynamicAskSpread.Update(0) + ds.DynamicBidSpread.Update(0) + } +} + +// GetAskSpread returns current ask spread +func (ds *DynamicSpreadSettings) GetAskSpread() (askSpread float64, err error) { + if !ds.Enabled { + return 0, errors.New("dynamic spread is not enabled") + } + + if ds.AskSpreadScale != nil && ds.DynamicAskSpread.Length() >= ds.Window { + askSpread, err = ds.AskSpreadScale.Scale(ds.DynamicAskSpread.Last()) + if err != nil { + log.WithError(err).Errorf("can not calculate dynamicAskSpread") + return 0, err + } + + return askSpread, nil + } + + return 0, errors.New("incomplete dynamic spread settings or not enough data yet") +} + +// GetBidSpread returns current dynamic bid spread +func (ds *DynamicSpreadSettings) GetBidSpread() (bidSpread float64, err error) { + if !ds.Enabled { + return 0, errors.New("dynamic spread is not enabled") + } + + if ds.BidSpreadScale != nil && ds.DynamicBidSpread.Length() >= ds.Window { + bidSpread, err = ds.BidSpreadScale.Scale(ds.DynamicBidSpread.Last()) + if err != nil { + log.WithError(err).Errorf("can not calculate dynamicBidSpread") + return 0, err + } + + return bidSpread, nil + } + + return 0, errors.New("incomplete dynamic spread settings or not enough data yet") +} diff --git a/pkg/strategy/bollmaker/strategy.go b/pkg/strategy/bollmaker/strategy.go new file mode 100644 index 0000000000..9dec53ca95 --- /dev/null +++ b/pkg/strategy/bollmaker/strategy.go @@ -0,0 +1,689 @@ +package bollmaker + +import ( + "context" + "fmt" + "math" + "sync" + + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/util" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +// TODO: +// 1) add option for placing orders only when in neutral band +// 2) add option for only placing buy orders when price is below the SMA line + +const ID = "bollmaker" + +const stateKey = "state-v1" + +var notionModifier = fixedpoint.NewFromFloat(1.1) +var two = fixedpoint.NewFromInt(2) + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +// Deprecated: State is deprecated, please use the persistence tag +type State struct { + // Deprecated: Position is deprecated, please define the Position field in the strategy struct directly. + Position *types.Position `json:"position,omitempty"` + + // Deprecated: ProfitStats is deprecated, please define the ProfitStats field in the strategy struct directly. 
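DynamicSpreadSettings.Update above feeds each closed kline's relative amplitude, (high - low) / open, into the ask-side SMA on up candles and the bid-side SMA on down candles, zeroing the other side; the getters then map the SMA value through the configured PercentageScale. A standalone sketch of just that bookkeeping, assuming a trivial rolling average in place of indicator.SMA:

```go
package main

import "fmt"

// rollingSMA is a trivial stand-in for indicator.SMA, good enough to show how
// the two spread SMAs are fed.
type rollingSMA struct {
	window int
	values []float64
}

func (s *rollingSMA) Update(v float64) {
	s.values = append(s.values, v)
	if len(s.values) > s.window {
		s.values = s.values[1:]
	}
}

func (s *rollingSMA) Last() float64 {
	if len(s.values) == 0 {
		return 0
	}
	var sum float64
	for _, v := range s.values {
		sum += v
	}
	return sum / float64(len(s.values))
}

func main() {
	askSpread := &rollingSMA{window: 3}
	bidSpread := &rollingSMA{window: 3}

	// (open, high, low, close) of three closed klines
	candles := [][4]float64{{100, 103, 99, 102}, {102, 103, 98, 99}, {99, 101, 98, 99}}
	for _, c := range candles {
		open, high, low, cl := c[0], c[1], c[2], c[3]
		ampl := (high - low) / open // relative amplitude of the candle
		switch {
		case cl > open: // up candle feeds the ask spread
			askSpread.Update(ampl)
			bidSpread.Update(0)
		case cl < open: // down candle feeds the bid spread
			bidSpread.Update(ampl)
			askSpread.Update(0)
		default: // doji: both sides record zero
			askSpread.Update(0)
			bidSpread.Update(0)
		}
	}
	fmt.Printf("dynamic ask SMA %.4f, bid SMA %.4f\n", askSpread.Last(), bidSpread.Last())
}
```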
+ ProfitStats types.ProfitStats `json:"profitStats,omitempty"` +} + +type BollingerSetting struct { + types.IntervalWindow + BandWidth float64 `json:"bandWidth"` +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + + Environment *bbgo.Environment + StandardIndicatorSet *bbgo.StandardIndicatorSet + Market types.Market + + // Symbol is the market symbol you want to trade + Symbol string `json:"symbol"` + + // Interval is how long do you want to update your order price and quantity + Interval types.Interval `json:"interval"` + + bbgo.QuantityOrAmount + + // Spread is the price spread from the middle price. + // For ask orders, the ask price is ((bestAsk + bestBid) / 2 * (1.0 + spread)) + // For bid orders, the bid price is ((bestAsk + bestBid) / 2 * (1.0 - spread)) + // Spread can be set by percentage or floating number. e.g., 0.1% or 0.001 + Spread fixedpoint.Value `json:"spread"` + + // BidSpread overrides the spread setting, this spread will be used for the buy order + BidSpread fixedpoint.Value `json:"bidSpread,omitempty"` + + // AskSpread overrides the spread setting, this spread will be used for the sell order + AskSpread fixedpoint.Value `json:"askSpread,omitempty"` + + // DynamicSpread enables the automatic adjustment to bid and ask spread. + DynamicSpread DynamicSpreadSettings `json:"dynamicSpread,omitempty"` + + // MinProfitSpread is the minimal order price spread from the current average cost. + // For long position, you will only place sell order above the price (= average cost * (1 + minProfitSpread)) + // For short position, you will only place buy order below the price (= average cost * (1 - minProfitSpread)) + MinProfitSpread fixedpoint.Value `json:"minProfitSpread"` + + // UseTickerPrice use the ticker api to get the mid price instead of the closed kline price. + // The back-test engine is kline-based, so the ticker price api is not supported. + // Turn this on if you want to do real trading. + UseTickerPrice bool `json:"useTickerPrice"` + + // MaxExposurePosition is the maximum position you can hold + // +10 means you can hold 10 ETH long position by maximum + // -10 means you can hold -10 ETH short position by maximum + MaxExposurePosition fixedpoint.Value `json:"maxExposurePosition"` + + // DynamicExposurePositionScale is used to define the exposure position range with the given percentage + // when DynamicExposurePositionScale is set, + // your MaxExposurePosition will be calculated dynamically according to the bollinger band you set. + DynamicExposurePositionScale *bbgo.PercentageScale `json:"dynamicExposurePositionScale"` + + // Long means your position will be long position + // Currently not used yet + Long *bool `json:"long,omitempty"` + + // Short means your position will be long position + // Currently not used yet + Short *bool `json:"short,omitempty"` + + // DisableShort means you can don't want short position during the market making + // Set to true if you want to hold more spot during market making. + DisableShort bool `json:"disableShort"` + + // BuyBelowNeutralSMA if true, the market maker will only place buy order when the current price is below the neutral band SMA. + BuyBelowNeutralSMA bool `json:"buyBelowNeutralSMA"` + + // NeutralBollinger is the smaller range of the bollinger band + // If price is in this band, it usually means the price is oscillating. 
+ // If price goes out of this band, we tend to not place sell orders or buy orders + NeutralBollinger *BollingerSetting `json:"neutralBollinger"` + + // DefaultBollinger is the wide range of the bollinger band + // for controlling your exposure position + DefaultBollinger *BollingerSetting `json:"defaultBollinger"` + + // DowntrendSkew is the order quantity skew for normal downtrend band. + // The price is still in the default bollinger band. + // greater than 1.0 means when placing buy order, place sell order with less quantity + // less than 1.0 means when placing sell order, place buy order with less quantity + DowntrendSkew fixedpoint.Value `json:"downtrendSkew"` + + // UptrendSkew is the order quantity skew for normal uptrend band. + // The price is still in the default bollinger band. + // greater than 1.0 means when placing buy order, place sell order with less quantity + // less than 1.0 means when placing sell order, place buy order with less quantity + UptrendSkew fixedpoint.Value `json:"uptrendSkew"` + + // TradeInBand + // When this is on, places orders only when the current price is in the bollinger band. + TradeInBand bool `json:"tradeInBand"` + + // ShadowProtection is used to avoid placing bid order when price goes down strongly (without shadow) + ShadowProtection bool `json:"shadowProtection"` + ShadowProtectionRatio fixedpoint.Value `json:"shadowProtectionRatio"` + + bbgo.SmartStops + + session *bbgo.ExchangeSession + book *types.StreamOrderBook + + state *State + + // persistence fields + Position *types.Position `json:"position,omitempty" persistence:"position"` + ProfitStats *types.ProfitStats `json:"profitStats,omitempty" persistence:"profit_stats"` + + orderExecutor *bbgo.GeneralOrderExecutor + + groupID uint32 + + stopC chan struct{} + + // defaultBoll is the BOLLINGER indicator we used for predicting the price. 
+ defaultBoll *indicator.BOLL + + // neutralBoll is the neutral price section + neutralBoll *indicator.BOLL + + // StrategyController + bbgo.StrategyController +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) Initialize() error { + return s.SmartStops.InitializeStopControllers(s.Symbol) +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + Interval: s.Interval, + }) + + if s.DefaultBollinger != nil && s.DefaultBollinger.Interval != "" { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + Interval: s.DefaultBollinger.Interval, + }) + } + + if s.NeutralBollinger != nil && s.NeutralBollinger.Interval != "" { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + Interval: s.NeutralBollinger.Interval, + }) + } + + s.SmartStops.Subscribe(session) +} + +func (s *Strategy) Validate() error { + if len(s.Symbol) == 0 { + return errors.New("symbol is required") + } + + return nil +} + +func (s *Strategy) CurrentPosition() *types.Position { + return s.Position +} + +func (s *Strategy) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error { + base := s.Position.GetBase() + if base.IsZero() { + return fmt.Errorf("no opened %s position", s.Position.Symbol) + } + + // make it negative + quantity := base.Mul(percentage).Abs() + side := types.SideTypeBuy + if base.Sign() > 0 { + side = types.SideTypeSell + } + + if quantity.Compare(s.Market.MinQuantity) < 0 { + return fmt.Errorf("order quantity %v is too small, less than %v", quantity, s.Market.MinQuantity) + } + + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: side, + Type: types.OrderTypeMarket, + Quantity: quantity, + Market: s.Market, + } + + bbgo.Notify("Submitting %s %s order to close position by %v", s.Symbol, side.String(), percentage, submitOrder) + + _, err := s.orderExecutor.SubmitOrders(ctx, submitOrder) + return err +} + +// Deprecated: LoadState method is migrated to the persistence struct tag. 
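ClosePosition above sizes the closing market order as |base| * percentage, picks the side that opposes the position, and refuses to send dust below the market's minimum quantity. A float64 sketch of that decision; closePositionOrder is an illustrative stand-in, not the strategy method:

```go
package main

import (
	"fmt"
	"math"
)

// closePositionOrder reproduces the sizing rules of ClosePosition above with
// plain float64 values: quantity = |base| * percentage, side opposes the
// position, and anything below the market minimum is rejected.
func closePositionOrder(base, percentage, minQuantity float64) (side string, qty float64, err error) {
	if base == 0 {
		return "", 0, fmt.Errorf("no opened position")
	}
	qty = math.Abs(base * percentage)
	side = "BUY" // closing a short position buys back
	if base > 0 {
		side = "SELL" // closing a long position sells
	}
	if qty < minQuantity {
		return "", 0, fmt.Errorf("order quantity %v is too small, less than %v", qty, minQuantity)
	}
	return side, qty, nil
}

func main() {
	side, qty, err := closePositionOrder(1.5, 0.5, 0.001) // close 50% of a 1.5 long
	fmt.Println(side, qty, err)                           // SELL 0.75 <nil>
}
```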
+func (s *Strategy) LoadState() error { + var state State + + // load position + if err := s.Persistence.Load(&state, ID, s.Symbol, stateKey); err == nil { + s.state = &state + } + + return nil +} + +func (s *Strategy) getCurrentAllowedExposurePosition(bandPercentage float64) (fixedpoint.Value, error) { + if s.DynamicExposurePositionScale != nil { + v, err := s.DynamicExposurePositionScale.Scale(bandPercentage) + if err != nil { + return fixedpoint.Zero, err + } + return fixedpoint.NewFromFloat(v), nil + } + + return s.MaxExposurePosition, nil +} + +func (s *Strategy) placeOrders(ctx context.Context, midPrice fixedpoint.Value, kline *types.KLine) { + bidSpread := s.Spread + if s.BidSpread.Sign() > 0 { + bidSpread = s.BidSpread + } + + askSpread := s.Spread + if s.AskSpread.Sign() > 0 { + askSpread = s.AskSpread + } + + askPrice := midPrice.Mul(fixedpoint.One.Add(askSpread)) + bidPrice := midPrice.Mul(fixedpoint.One.Sub(bidSpread)) + base := s.Position.GetBase() + balances := s.session.GetAccount().Balances() + + log.Infof("mid price:%v spread: %s ask:%v bid: %v position: %s", + midPrice, + s.Spread.Percentage(), + askPrice, + bidPrice, + s.Position, + ) + + sellQuantity := s.QuantityOrAmount.CalculateQuantity(askPrice) + buyQuantity := s.QuantityOrAmount.CalculateQuantity(bidPrice) + + sellOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeLimitMaker, + Quantity: sellQuantity, + Price: askPrice, + Market: s.Market, + GroupID: s.groupID, + } + buyOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeLimitMaker, + Quantity: buyQuantity, + Price: bidPrice, + Market: s.Market, + GroupID: s.groupID, + } + + var submitOrders []types.SubmitOrder + + baseBalance, hasBaseBalance := balances[s.Market.BaseCurrency] + quoteBalance, hasQuoteBalance := balances[s.Market.QuoteCurrency] + + downBand := s.defaultBoll.LastDownBand() + upBand := s.defaultBoll.LastUpBand() + sma := s.defaultBoll.LastSMA() + log.Infof("%s bollinger band: up %f sma %f down %f", s.Symbol, upBand, sma, downBand) + + bandPercentage := calculateBandPercentage(upBand, downBand, sma, midPrice.Float64()) + log.Infof("%s mid price band percentage: %v", s.Symbol, bandPercentage) + + maxExposurePosition, err := s.getCurrentAllowedExposurePosition(bandPercentage) + if err != nil { + log.WithError(err).Errorf("can not calculate %s CurrentAllowedExposurePosition", s.Symbol) + return + } + + log.Infof("calculated %s max exposure position: %v", s.Symbol, maxExposurePosition) + + if !s.Position.IsClosed() && !s.Position.IsDust(midPrice) { + log.Infof("current %s unrealized profit: %f %s", s.Symbol, s.Position.UnrealizedProfit(midPrice).Float64(), s.Market.QuoteCurrency) + } + + canSell := true + canBuy := true + + if maxExposurePosition.Sign() > 0 && base.Compare(maxExposurePosition) > 0 { + canBuy = false + } + + if maxExposurePosition.Sign() > 0 { + if s.hasLongSet() && base.Sign() < 0 { + canSell = false + } else if base.Compare(maxExposurePosition.Neg()) < 0 { + canSell = false + } + } + + if s.ShadowProtection && kline != nil { + switch kline.Direction() { + case types.DirectionDown: + shadowHeight := kline.GetLowerShadowHeight() + shadowRatio := kline.GetLowerShadowRatio() + if shadowHeight.IsZero() && shadowRatio.Compare(s.ShadowProtectionRatio) < 0 { + log.Infof("%s shadow protection enabled, lower shadow ratio %v < %v", s.Symbol, shadowRatio, s.ShadowProtectionRatio) + canBuy = false + } + case types.DirectionUp: + shadowHeight := 
kline.GetUpperShadowHeight() + shadowRatio := kline.GetUpperShadowRatio() + if shadowHeight.IsZero() || shadowRatio.Compare(s.ShadowProtectionRatio) < 0 { + log.Infof("%s shadow protection enabled, upper shadow ratio %v < %v", s.Symbol, shadowRatio, s.ShadowProtectionRatio) + canSell = false + } + } + } + + // Apply quantity skew + // CASE #1: + // WHEN: price is in the neutral bollginer band (window 1) == neutral + // THEN: we don't apply skew + // CASE #2: + // WHEN: price is in the upper band (window 2 > price > window 1) == upTrend + // THEN: we apply upTrend skew + // CASE #3: + // WHEN: price is in the lower band (window 2 < price < window 1) == downTrend + // THEN: we apply downTrend skew + // CASE #4: + // WHEN: price breaks the lower band (price < window 2) == strongDownTrend + // THEN: we apply strongDownTrend skew + // CASE #5: + // WHEN: price breaks the upper band (price > window 2) == strongUpTrend + // THEN: we apply strongUpTrend skew + if s.TradeInBand { + if !inBetween(midPrice.Float64(), s.neutralBoll.LastDownBand(), s.neutralBoll.LastUpBand()) { + log.Infof("tradeInBand is set, skip placing orders when the price is outside of the band") + return + } + } + + trend := detectPriceTrend(s.neutralBoll, midPrice.Float64()) + switch trend { + case NeutralTrend: + // do nothing + + case UpTrend: + skew := s.UptrendSkew + buyOrder.Quantity = fixedpoint.Max(s.Market.MinQuantity, sellOrder.Quantity.Mul(skew)) + + case DownTrend: + skew := s.DowntrendSkew + ratio := fixedpoint.One.Div(skew) + sellOrder.Quantity = fixedpoint.Max(s.Market.MinQuantity, buyOrder.Quantity.Mul(ratio)) + + } + + if !hasQuoteBalance || buyOrder.Quantity.Mul(buyOrder.Price).Compare(quoteBalance.Available) > 0 { + canBuy = false + } + + if !hasBaseBalance || sellOrder.Quantity.Compare(baseBalance.Available) > 0 { + canSell = false + } + + isLongPosition := s.Position.IsLong() + isShortPosition := s.Position.IsShort() + minProfitPrice := s.Position.AverageCost.Mul(fixedpoint.One.Add(s.MinProfitSpread)) + if isShortPosition { + minProfitPrice = s.Position.AverageCost.Mul(fixedpoint.One.Sub(s.MinProfitSpread)) + } + + if isLongPosition { + // for long position if the current price is lower than the minimal profitable price then we should stop sell + if midPrice.Compare(minProfitPrice) < 0 { + canSell = false + } + } else if isShortPosition { + // for short position if the current price is higher than the minimal profitable price then we should stop buy + if midPrice.Compare(minProfitPrice) > 0 { + canBuy = false + } + } + + if s.hasLongSet() && base.Sub(sellOrder.Quantity).Sign() < 0 { + canSell = false + } + + if s.BuyBelowNeutralSMA && midPrice.Float64() > s.neutralBoll.LastSMA() { + canBuy = false + } + + if canSell { + submitOrders = append(submitOrders, sellOrder) + } + if canBuy { + submitOrders = append(submitOrders, buyOrder) + } + + // condition for lower the average cost + /* + if midPrice < s.Position.AverageCost.MulFloat64(1.0-s.MinProfitSpread.Float64()) && canBuy { + submitOrders = append(submitOrders, buyOrder) + } + */ + + if len(submitOrders) == 0 { + return + } + + for i := range submitOrders { + submitOrders[i] = adjustOrderQuantity(submitOrders[i], s.Market) + } + + _, _ = s.orderExecutor.SubmitOrders(ctx, submitOrders...) 
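The trend switch above skews order quantities rather than prices: in an uptrend the buy quantity is scaled down by UptrendSkew, and in a downtrend the sell quantity is scaled down by 1/DowntrendSkew, with the market minimum as a floor. A small numeric sketch using the defaults assigned later in Run (UptrendSkew = 1/1.2, DowntrendSkew = 1.2):

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	const (
		minQuantity   = 0.001     // market minimum quantity, used as a floor
		uptrendSkew   = 1.0 / 1.2 // default assigned in Run when unset
		downtrendSkew = 1.2       // default assigned in Run when unset
		baseQuantity  = 1.2       // quantity both orders would otherwise use
	)

	// UpTrend: buy quantity = max(minQuantity, sell quantity * UptrendSkew)
	buyQty := math.Max(minQuantity, baseQuantity*uptrendSkew)

	// DownTrend: sell quantity = max(minQuantity, buy quantity * (1 / DowntrendSkew))
	sellQty := math.Max(minQuantity, baseQuantity*(1.0/downtrendSkew))

	fmt.Printf("uptrend buy quantity:    %.4f\n", buyQty)
	fmt.Printf("downtrend sell quantity: %.4f\n", sellQty)
}
```

In both cases the skewed side ends up smaller than the unskewed side, so the maker builds inventory more slowly against the prevailing trend.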
+} + +func (s *Strategy) hasLongSet() bool { + return s.Long != nil && *s.Long +} + +func (s *Strategy) hasShortSet() bool { + return s.Short != nil && *s.Short +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + // StrategyController + s.Status = types.StrategyStatusRunning + + // Setup dynamic spread + if s.DynamicSpread.Enabled { + s.DynamicSpread.DynamicBidSpread = &indicator.SMA{IntervalWindow: types.IntervalWindow{s.Interval, s.DynamicSpread.Window}} + s.DynamicSpread.DynamicAskSpread = &indicator.SMA{IntervalWindow: types.IntervalWindow{s.Interval, s.DynamicSpread.Window}} + } + + s.OnSuspend(func() { + s.Status = types.StrategyStatusStopped + _ = s.orderExecutor.GracefulCancel(ctx) + _ = s.Persistence.Sync(s) + }) + + s.OnEmergencyStop(func() { + // Close 100% position + percentage := fixedpoint.NewFromFloat(1.0) + _ = s.ClosePosition(ctx, percentage) + }) + + if s.DisableShort { + s.Long = &[]bool{true}[0] + } + + if s.MinProfitSpread.IsZero() { + s.MinProfitSpread = fixedpoint.NewFromFloat(0.001) + } + + if s.UptrendSkew.IsZero() { + s.UptrendSkew = fixedpoint.NewFromFloat(1.0 / 1.2) + } + + if s.DowntrendSkew.IsZero() { + s.DowntrendSkew = fixedpoint.NewFromFloat(1.2) + } + + if s.ShadowProtectionRatio.IsZero() { + s.ShadowProtectionRatio = fixedpoint.NewFromFloat(0.01) + } + + // initial required information + s.session = session + + s.neutralBoll = s.StandardIndicatorSet.BOLL(s.NeutralBollinger.IntervalWindow, s.NeutralBollinger.BandWidth) + s.defaultBoll = s.StandardIndicatorSet.BOLL(s.DefaultBollinger.IntervalWindow, s.DefaultBollinger.BandWidth) + + // calculate group id for orders + instanceID := s.InstanceID() + s.groupID = util.FNV32(instanceID) + + // If position is nil, we need to allocate a new position for calculation + if s.Position == nil { + // restore state (legacy) + if err := s.LoadState(); err != nil { + return err + } + + // fallback to the legacy position struct in the state + if s.state != nil && s.state.Position != nil && !s.state.Position.Base.IsZero() { + s.Position = s.state.Position + } else { + s.Position = types.NewPositionFromMarket(s.Market) + } + } + + if s.session.MakerFeeRate.Sign() > 0 || s.session.TakerFeeRate.Sign() > 0 { + s.Position.SetExchangeFeeRate(s.session.ExchangeName, types.ExchangeFee{ + MakerFeeRate: s.session.MakerFeeRate, + TakerFeeRate: s.session.TakerFeeRate, + }) + } + + if s.ProfitStats == nil { + if s.state != nil { + // copy profit stats + p2 := s.state.ProfitStats + s.ProfitStats = &p2 + } else { + s.ProfitStats = types.NewProfitStats(s.Market) + } + } + + // Always update the position fields + s.Position.Strategy = ID + s.Position.StrategyInstanceID = instanceID + + s.orderExecutor = bbgo.NewGeneralOrderExecutor(session, s.Symbol, ID, instanceID, s.Position) + s.orderExecutor.BindEnvironment(s.Environment) + s.orderExecutor.BindProfitStats(s.ProfitStats) + s.orderExecutor.Bind() + + s.stopC = make(chan struct{}) + + // TODO: migrate persistance to singleton + s.orderExecutor.TradeCollector().OnPositionUpdate(func(position *types.Position) { + if err := s.Persistence.Sync(s); err != nil { + log.WithError(err).Errorf("can not sync state to persistence") + } + }) + + s.SmartStops.RunStopControllers(ctx, session, s.orderExecutor.TradeCollector()) + + if s.Environment.IsBackTesting() { + log.Warn("turning of useTickerPrice option in the back-testing environment...") + s.UseTickerPrice = false + } + + session.UserDataStream.OnStart(func() { + if 
s.UseTickerPrice { + ticker, err := s.session.Exchange.QueryTicker(ctx, s.Symbol) + if err != nil { + return + } + + midPrice := ticker.Buy.Add(ticker.Sell).Div(two) + s.placeOrders(ctx, midPrice, nil) + } else { + if price, ok := session.LastPrice(s.Symbol); ok { + s.placeOrders(ctx, price, nil) + } + } + }) + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // StrategyController + if s.Status != types.StrategyStatusRunning { + return + } + + if kline.Symbol != s.Symbol || kline.Interval != s.Interval { + return + } + + // Update spreads with dynamic spread + if s.DynamicSpread.Enabled { + s.DynamicSpread.Update(kline) + dynamicBidSpread, err := s.DynamicSpread.GetBidSpread() + if err == nil && dynamicBidSpread > 0 { + s.BidSpread = fixedpoint.NewFromFloat(dynamicBidSpread) + log.Infof("new bid spread: %v", s.BidSpread.Percentage()) + } + dynamicAskSpread, err := s.DynamicSpread.GetAskSpread() + if err == nil && dynamicAskSpread > 0 { + s.AskSpread = fixedpoint.NewFromFloat(dynamicAskSpread) + log.Infof("new ask spread: %v", s.AskSpread.Percentage()) + } + } + + _ = s.orderExecutor.GracefulCancel(ctx) + + if s.UseTickerPrice { + ticker, err := s.session.Exchange.QueryTicker(ctx, s.Symbol) + if err != nil { + return + } + + midPrice := ticker.Buy.Add(ticker.Sell).Div(two) + log.Infof("using ticker price: bid %v / ask %v, mid price %v", ticker.Buy, ticker.Sell, midPrice) + s.placeOrders(ctx, midPrice, &kline) + } else { + s.placeOrders(ctx, kline.Close, &kline) + } + }) + + // s.book = types.NewStreamBook(s.Symbol) + // s.book.BindStreamForBackground(session.MarketDataStream) + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + close(s.stopC) + + _ = s.orderExecutor.GracefulCancel(ctx) + }) + + return nil +} + +func calculateBandPercentage(up, down, sma, midPrice float64) float64 { + if midPrice < sma { + // should be negative percentage + return (midPrice - sma) / math.Abs(sma-down) + } else if midPrice > sma { + // should be positive percentage + return (midPrice - sma) / math.Abs(up-sma) + } + + return 0.0 +} + +func inBetween(x, a, b float64) bool { + return a < x && x < b +} + +func adjustOrderQuantity(submitOrder types.SubmitOrder, market types.Market) types.SubmitOrder { + if submitOrder.Quantity.Mul(submitOrder.Price).Compare(market.MinNotional) < 0 { + submitOrder.Quantity = bbgo.AdjustFloatQuantityByMinAmount(submitOrder.Quantity, submitOrder.Price, market.MinNotional.Mul(notionModifier)) + } + + if submitOrder.Quantity.Compare(market.MinQuantity) < 0 { + submitOrder.Quantity = fixedpoint.Max(submitOrder.Quantity, market.MinQuantity) + } + + return submitOrder +} diff --git a/pkg/strategy/bollmaker/strategy_test.go b/pkg/strategy/bollmaker/strategy_test.go new file mode 100644 index 0000000000..5f201238a0 --- /dev/null +++ b/pkg/strategy/bollmaker/strategy_test.go @@ -0,0 +1,69 @@ +package bollmaker + +import ( + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +func Test_calculateBandPercentage(t *testing.T) { + type args struct { + up float64 + down float64 + sma float64 + midPrice float64 + } + tests := []struct { + name string + args args + want fixedpoint.Value + }{ + { + name: "positive boundary", + args: args{ + up: 2000.0, + sma: 1500.0, + down: 1000.0, + midPrice: 2000.0, + }, + want: fixedpoint.NewFromFloat(1.0), + }, + { + name: "inside positive boundary", + args: args{ + up: 2000.0, + sma: 1500.0, + down: 1000.0, + midPrice: 1600.0, + }, + want: fixedpoint.NewFromFloat(0.2), // 20% + }, + { + name: 
"negative boundary", + args: args{ + up: 2000.0, + sma: 1500.0, + down: 1000.0, + midPrice: 1000.0, + }, + want: fixedpoint.NewFromFloat(-1.0), + }, + { + name: "out of negative boundary", + args: args{ + up: 2000.0, + sma: 1500.0, + down: 1000.0, + midPrice: 800.0, + }, + want: fixedpoint.NewFromFloat(-1.4), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := calculateBandPercentage(tt.args.up, tt.args.down, tt.args.sma, tt.args.midPrice); fixedpoint.NewFromFloat(got) != tt.want { + t.Errorf("calculateBandPercentage() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/strategy/bollmaker/trend.go b/pkg/strategy/bollmaker/trend.go new file mode 100644 index 0000000000..654ac4cce4 --- /dev/null +++ b/pkg/strategy/bollmaker/trend.go @@ -0,0 +1,28 @@ +package bollmaker + +import "github.com/c9s/bbgo/pkg/indicator" + +type PriceTrend string + +const ( + NeutralTrend PriceTrend = "neutral" + UpTrend PriceTrend = "upTrend" + DownTrend PriceTrend = "downTrend" + UnknownTrend PriceTrend = "unknown" +) + +func detectPriceTrend(inc *indicator.BOLL, price float64) PriceTrend { + if inBetween(price, inc.LastDownBand(), inc.LastUpBand()) { + return NeutralTrend + } + + if price < inc.LastDownBand() { + return DownTrend + } + + if price > inc.LastUpBand() { + return UpTrend + } + + return UnknownTrend +} diff --git a/pkg/strategy/dca/strategy.go b/pkg/strategy/dca/strategy.go new file mode 100644 index 0000000000..932eb570ef --- /dev/null +++ b/pkg/strategy/dca/strategy.go @@ -0,0 +1,247 @@ +package dca + +import ( + "context" + "fmt" + "time" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "dca" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type BudgetPeriod string + +const ( + BudgetPeriodDay BudgetPeriod = "day" + BudgetPeriodWeek BudgetPeriod = "week" + BudgetPeriodMonth BudgetPeriod = "month" +) + +func (b BudgetPeriod) Duration() time.Duration { + var period time.Duration + switch b { + case BudgetPeriodDay: + period = 24 * time.Hour + + case BudgetPeriodWeek: + period = 24 * time.Hour * 7 + + case BudgetPeriodMonth: + period = 24 * time.Hour * 30 + + } + + return period +} + +// Strategy is the Dollar-Cost-Average strategy +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + + Environment *bbgo.Environment + Symbol string `json:"symbol"` + Market types.Market + + // BudgetPeriod is how long your budget quota will be reset. 
+ // day, week, month + BudgetPeriod BudgetPeriod `json:"budgetPeriod"` + + // Budget is the amount you invest per budget period + Budget fixedpoint.Value `json:"budget"` + + // InvestmentInterval is the interval of each investment + InvestmentInterval types.Interval `json:"investmentInterval"` + + budgetPerInvestment fixedpoint.Value + + Position *types.Position `persistence:"position"` + ProfitStats *types.ProfitStats `persistence:"profit_stats"` + BudgetQuota fixedpoint.Value `persistence:"budget_quota"` + BudgetPeriodStartTime time.Time `persistence:"budget_period_start_time"` + + activeMakerOrders *bbgo.ActiveOrderBook + orderStore *bbgo.OrderStore + tradeCollector *bbgo.TradeCollector + + session *bbgo.ExchangeSession + + bbgo.StrategyController +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.InvestmentInterval}) +} + +func (s *Strategy) submitOrders(ctx context.Context, orderExecutor bbgo.OrderExecutor, submitOrders ...types.SubmitOrder) { + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrders...) + if err != nil { + log.WithError(err).Errorf("can not place orders") + } + + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + s.tradeCollector.Process() +} + +func (s *Strategy) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error { + base := s.Position.GetBase() + if base.IsZero() { + return fmt.Errorf("no opened %s position", s.Position.Symbol) + } + + // make it negative + quantity := base.Mul(percentage).Abs() + side := types.SideTypeBuy + if base.Sign() > 0 { + side = types.SideTypeSell + } + + if quantity.Compare(s.Market.MinQuantity) < 0 { + return fmt.Errorf("order quantity %v is too small, less than %v", quantity, s.Market.MinQuantity) + } + + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: side, + Type: types.OrderTypeMarket, + Quantity: quantity, + Market: s.Market, + } + + // s.Notify("Submitting %s %s order to close position by %v", s.Symbol, side.String(), percentage, submitOrder) + + createdOrders, err := s.session.Exchange.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place position close order") + } + + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) 
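The Run method in the next hunk splits the configured Budget evenly across a budget period (budgetPerInvestment = Budget / (period / investmentInterval)), refills BudgetQuota once a full period has elapsed, and skips a buy when the remaining quota cannot cover one more investment; filled trades then deduct their quote amount from the quota via the trade collector. A condensed float64/time.Duration sketch of that loop, with illustrative names:

```go
package main

import (
	"fmt"
	"time"
)

// dcaState condenses the budget bookkeeping of the DCA strategy above into
// plain Go values; the real code uses fixedpoint.Value and deducts the quota
// from filled trades via the trade collector.
type dcaState struct {
	budget              float64
	budgetQuota         float64
	budgetPerInvestment float64
	periodStart         time.Time
	period              time.Duration
}

func newDCAState(budget float64, period, interval time.Duration, start time.Time) *dcaState {
	numOfInvestments := float64(period) / float64(interval)
	return &dcaState{
		budget:              budget,
		budgetQuota:         budget,
		budgetPerInvestment: budget / numOfInvestments,
		periodStart:         start,
		period:              period,
	}
}

// onKLineClosed returns the market-buy quantity for a closed kline, or false
// when the quota for the current budget period is exhausted.
func (s *dcaState) onKLineClosed(end time.Time, closePrice float64) (float64, bool) {
	if end.Sub(s.periodStart) >= s.period {
		s.budgetQuota = s.budget // a new budget period starts: refill the quota
		s.periodStart = end
	}
	if s.budgetQuota <= s.budgetPerInvestment {
		return 0, false
	}
	return s.budgetPerInvestment / closePrice, true
}

func main() {
	start := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC)
	// 700 USDT per week, one buy every 8 hours => 21 buys of ~33.33 USDT each
	s := newDCAState(700, 7*24*time.Hour, 8*time.Hour, start)
	qty, ok := s.onKLineClosed(start.Add(8*time.Hour), 25000)
	fmt.Printf("buy %.6f BTC: %v\n", qty, ok) // buy 0.001333 BTC: true
}
```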
+ s.tradeCollector.Process() + return err +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +// check if position can be close or not +func canClosePosition(position *types.Position, signal fixedpoint.Value, price fixedpoint.Value) bool { + return !signal.IsZero() && position.IsShort() && !position.IsDust(price) +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + // initial required information + s.session = session + + s.activeMakerOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeMakerOrders.BindStream(session.UserDataStream) + + s.orderStore = bbgo.NewOrderStore(s.Symbol) + s.orderStore.BindStream(session.UserDataStream) + + if s.Position == nil { + s.Position = types.NewPositionFromMarket(s.Market) + } + + if s.ProfitStats == nil { + s.ProfitStats = types.NewProfitStats(s.Market) + } + + instanceID := s.InstanceID() + + if s.BudgetQuota.IsZero() { + s.BudgetQuota = s.Budget + } + + numOfInvestmentPerPeriod := fixedpoint.NewFromFloat(float64(s.BudgetPeriod.Duration()) / float64(s.InvestmentInterval.Duration())) + s.budgetPerInvestment = s.Budget.Div(numOfInvestmentPerPeriod) + + // Always update the position fields + s.Position.Strategy = ID + s.Position.StrategyInstanceID = instanceID + + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.Position, s.orderStore) + s.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + bbgo.Notify(trade) + s.ProfitStats.AddTrade(trade) + + if profit.Compare(fixedpoint.Zero) == 0 { + s.Environment.RecordPosition(s.Position, trade, nil) + } else { + log.Infof("%s generated profit: %v", s.Symbol, profit) + p := s.Position.NewProfit(trade, profit, netProfit) + p.Strategy = ID + p.StrategyInstanceID = instanceID + bbgo.Notify(&p) + + s.ProfitStats.AddProfit(p) + bbgo.Notify(&s.ProfitStats) + + s.Environment.RecordPosition(s.Position, trade, &p) + } + }) + + s.tradeCollector.OnTrade(func(trade types.Trade, profit fixedpoint.Value, netProfit fixedpoint.Value) { + s.BudgetQuota = s.BudgetQuota.Sub(trade.QuoteQuantity) + }) + + s.tradeCollector.OnPositionUpdate(func(position *types.Position) { + log.Infof("position changed: %s", s.Position) + bbgo.Notify(s.Position) + }) + + s.tradeCollector.BindStream(session.UserDataStream) + + session.UserDataStream.OnStart(func() {}) + session.MarketDataStream.OnKLine(func(kline types.KLine) {}) + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + if kline.Symbol != s.Symbol || kline.Interval != s.InvestmentInterval { + return + } + + if s.BudgetPeriodStartTime == (time.Time{}) { + s.BudgetPeriodStartTime = kline.StartTime.Time().Truncate(time.Minute) + } + + if kline.EndTime.Time().Sub(s.BudgetPeriodStartTime) >= s.BudgetPeriod.Duration() { + // reset budget quota + s.BudgetQuota = s.Budget + s.BudgetPeriodStartTime = kline.StartTime.Time() + } + + // check if we have quota + if s.BudgetQuota.Compare(s.budgetPerInvestment) <= 0 { + return + } + + price := kline.Close + quantity := s.budgetPerInvestment.Div(price) + + s.submitOrders(ctx, orderExecutor, types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeMarket, + Quantity: quantity, + Market: s.Market, + }) + }) + + return nil +} diff --git a/pkg/strategy/trailingstop/strategy.go b/pkg/strategy/emastop/strategy.go similarity index 74% rename from pkg/strategy/trailingstop/strategy.go rename to pkg/strategy/emastop/strategy.go index 7554b033d3..89c837b370 100644 --- 
a/pkg/strategy/trailingstop/strategy.go +++ b/pkg/strategy/emastop/strategy.go @@ -1,4 +1,4 @@ -package trailingstop +package emastop import ( "context" @@ -13,27 +13,20 @@ import ( "github.com/c9s/bbgo/pkg/types" ) -var log = logrus.WithField("strategy", "trailingstop") +const ID = "emastop" -// The indicators (SMA and EWMA) that we want to use are returning float64 data. -type Float64Indicator interface { - Last() float64 -} +var log = logrus.WithField("strategy", ID) func init() { // Register the pointer of the strategy struct, // so that bbgo knows what struct to be used to unmarshal the configs (YAML or JSON) // Note: built-in strategies need to imported manually in the bbgo cmd package. - bbgo.RegisterStrategy("trailingstop", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) } type Strategy struct { *bbgo.Graceful - // The notification system will be injected into the strategy automatically. - // This field will be injected automatically since it's a single exchange strategy. - *bbgo.Notifiability - SourceExchangeName string `json:"sourceExchange"` TargetExchangeName string `json:"targetExchange"` @@ -71,9 +64,13 @@ type Strategy struct { order types.Order } +func (s *Strategy) ID() string { + return ID +} + func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { - session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval.String()}) - session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.MovingAverageInterval.String()}) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.MovingAverageInterval}) } func (s *Strategy) CrossSubscribe(sessions map[string]*bbgo.ExchangeSession) { @@ -82,12 +79,12 @@ func (s *Strategy) CrossSubscribe(sessions map[string]*bbgo.ExchangeSession) { // make sure we have the connection alive targetSession := sessions[s.TargetExchangeName] - targetSession.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval.String()}) + targetSession.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) } -func (s *Strategy) clear(ctx context.Context, session *bbgo.ExchangeSession) { +func (s *Strategy) clear(ctx context.Context, orderExecutor bbgo.OrderExecutor) { if s.order.OrderID > 0 { - if err := session.Exchange.CancelOrders(ctx, s.order); err != nil { + if err := orderExecutor.CancelOrders(ctx, s.order); err != nil { log.WithError(err).Errorf("can not cancel trailingstop order: %+v", s.order) } @@ -96,22 +93,25 @@ func (s *Strategy) clear(ctx context.Context, session *bbgo.ExchangeSession) { } } -func (s *Strategy) place(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession, indicator Float64Indicator, closePrice float64) { - movingAveragePrice := indicator.Last() +func (s *Strategy) place(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession, indicator types.Float64Indicator, closePrice fixedpoint.Value) { + closePriceF := closePrice.Float64() + movingAveragePriceF := indicator.Last() // skip it if it's near zero because it's not loaded yet - if movingAveragePrice < 0.0001 { - log.Warnf("moving average price is near 0: %f", movingAveragePrice) + if movingAveragePriceF < 0.0001 { + log.Warnf("moving average price is near 0: %f", movingAveragePriceF) return } // place stop limit order only when the closed price is greater than the moving average price - 
if closePrice <= movingAveragePrice { - log.Warnf("close price %f is less than moving average price %f", closePrice, movingAveragePrice) + if closePriceF <= movingAveragePriceF { + log.Warnf("close price %v is less than moving average price %f", closePrice, movingAveragePriceF) return } - var price = 0.0 + movingAveragePrice := fixedpoint.NewFromFloat(movingAveragePriceF) + + var price = fixedpoint.Zero var orderType = types.OrderTypeStopMarket switch strings.ToLower(s.OrderType) { @@ -120,8 +120,8 @@ func (s *Strategy) place(ctx context.Context, orderExecutor bbgo.OrderExecutor, case "limit": orderType = types.OrderTypeStopLimit price = movingAveragePrice - if s.PriceRatio > 0 { - price = price * s.PriceRatio.Float64() + if s.PriceRatio.Sign() > 0 { + price = price.Mul(s.PriceRatio) } } @@ -132,24 +132,25 @@ func (s *Strategy) place(ctx context.Context, orderExecutor bbgo.OrderExecutor, } quantity := s.Quantity - if s.BalancePercentage > 0 { + if s.BalancePercentage.Sign() > 0 { - if balance, ok := session.Account.Balance(market.BaseCurrency); ok { + if balance, ok := session.GetAccount().Balance(market.BaseCurrency); ok { quantity = balance.Available.Mul(s.BalancePercentage) } } - if quantity.Float64()*closePrice < market.MinNotional { - log.Errorf("the amount of stop order (%f) is less than min notional %f", quantity.Float64()*closePrice, market.MinNotional) + amount := quantity.Mul(closePrice) + if amount.Compare(market.MinNotional) < 0 { + log.Errorf("the amount of stop order (%v) is less than min notional %v", amount, market.MinNotional) return } var stopPrice = movingAveragePrice - if s.StopPriceRatio > 0 { - stopPrice = stopPrice * s.StopPriceRatio.Float64() + if s.StopPriceRatio.Sign() > 0 { + stopPrice = stopPrice.Mul(s.StopPriceRatio) } - log.Infof("placing trailingstop order %s at stop price %f, quantity %f", s.Symbol, stopPrice, quantity.Float64()) + log.Infof("placing trailingstop order %s at stop price %v, quantity %v", s.Symbol, stopPrice, quantity) retOrders, err := orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ Symbol: s.Symbol, @@ -157,7 +158,7 @@ func (s *Strategy) place(ctx context.Context, orderExecutor bbgo.OrderExecutor, Type: orderType, Price: price, StopPrice: stopPrice, - Quantity: quantity.Float64(), + Quantity: quantity, }) if err != nil { log.WithError(err).Error("submit order error") @@ -174,7 +175,7 @@ func (s *Strategy) handleOrderUpdate(order types.Order) { } } -func (s *Strategy) loadIndicator(sourceSession *bbgo.ExchangeSession) (Float64Indicator, error) { +func (s *Strategy) loadIndicator(sourceSession *bbgo.ExchangeSession) (types.Float64Indicator, error) { var standardIndicatorSet, ok = sourceSession.StandardIndicatorSet(s.Symbol) if !ok { return nil, fmt.Errorf("standardIndicatorSet is nil, symbol %s", s.Symbol) @@ -200,10 +201,10 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se return err } - session.Stream.OnOrderUpdate(s.handleOrderUpdate) + session.UserDataStream.OnOrderUpdate(s.handleOrderUpdate) - // session.Stream.OnKLineClosed - session.Stream.OnKLineClosed(func(kline types.KLine) { + // session.UserDataStream.OnKLineClosed + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { // skip k-lines from other symbols if kline.Symbol != s.Symbol || kline.Interval != s.Interval { return @@ -212,14 +213,14 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se closePrice := kline.Close // ok, it's our call, we need to cancel the stop limit order first - s.clear(ctx, 
session) + s.clear(ctx, orderExecutor) s.place(ctx, orderExecutor, session, indicator, closePrice) }) s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { defer wg.Done() log.Infof("canceling trailingstop order...") - s.clear(ctx, session) + s.clear(ctx, orderExecutor) }) if lastPrice, ok := session.LastPrice(s.Symbol); ok { @@ -244,10 +245,10 @@ func (s *Strategy) CrossRun(ctx context.Context, _ bbgo.OrderExecutionRouter, se return err } - session.Stream.OnOrderUpdate(s.handleOrderUpdate) + session.UserDataStream.OnOrderUpdate(s.handleOrderUpdate) - // session.Stream.OnKLineClosed - sourceSession.Stream.OnKLineClosed(func(kline types.KLine) { + // session.UserDataStream.OnKLineClosed + sourceSession.MarketDataStream.OnKLineClosed(func(kline types.KLine) { // skip k-lines from other symbols if kline.Symbol != s.Symbol || kline.Interval != s.Interval { return @@ -256,14 +257,14 @@ func (s *Strategy) CrossRun(ctx context.Context, _ bbgo.OrderExecutionRouter, se closePrice := kline.Close // ok, it's our call, we need to cancel the stop limit order first - s.clear(ctx, session) + s.clear(ctx, &orderExecutor) s.place(ctx, &orderExecutor, session, indicator, closePrice) }) s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { defer wg.Done() log.Infof("canceling trailingstop order...") - s.clear(ctx, session) + s.clear(ctx, &orderExecutor) }) if lastPrice, ok := session.LastPrice(s.Symbol); ok { diff --git a/pkg/strategy/etf/strategy.go b/pkg/strategy/etf/strategy.go new file mode 100644 index 0000000000..f14c0a102d --- /dev/null +++ b/pkg/strategy/etf/strategy.go @@ -0,0 +1,107 @@ +package etf + +import ( + "context" + "time" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/fixedpoint" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "etf" + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + Market types.Market + + TotalAmount fixedpoint.Value `json:"totalAmount,omitempty"` + + // Interval is the period that you want to submit order + Duration types.Duration `json:"duration"` + + Index map[string]fixedpoint.Value `json:"index"` +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { +} + +func (s *Strategy) Validate() error { + if s.TotalAmount.IsZero() { + return errors.New("amount can not be empty") + } + + return nil +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + go func() { + ticker := time.NewTicker(s.Duration.Duration()) + defer ticker.Stop() + + bbgo.Notify("ETF orders will be executed every %s", s.Duration.Duration().String()) + + for { + select { + case <-ctx.Done(): + return + + case <-ticker.C: + totalAmount := s.TotalAmount + for symbol, ratio := range s.Index { + amount := totalAmount.Mul(ratio) + + ticker, err := session.Exchange.QueryTicker(ctx, symbol) + if err != nil { + bbgo.Notify("query ticker error: %s", err.Error()) + log.WithError(err).Error("query ticker error") + break + } + + askPrice := ticker.Sell + quantity := askPrice.Div(amount) + + // execute orders + quoteBalance, ok := session.GetAccount().Balance(s.Market.QuoteCurrency) + if !ok { + break + } + if quoteBalance.Available.Compare(amount) < 0 { + bbgo.Notify("Quote balance %s is not enough: %s < %s", s.Market.QuoteCurrency, quoteBalance.Available.String(), amount.String()) + break + } + + bbgo.Notify("Submitting etf order %s 
quantity %s at price %s (index ratio %s)", + symbol, + quantity.String(), + askPrice.String(), + ratio.Percentage()) + _, err = orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ + Symbol: symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeMarket, + Quantity: quantity, + }) + + if err != nil { + log.WithError(err).Error("submit order error") + } + + } + } + } + }() + + return nil +} diff --git a/pkg/strategy/ewoDgtrd/heikinashi.go b/pkg/strategy/ewoDgtrd/heikinashi.go new file mode 100644 index 0000000000..fca1934c03 --- /dev/null +++ b/pkg/strategy/ewoDgtrd/heikinashi.go @@ -0,0 +1,49 @@ +package ewoDgtrd + +import ( + "fmt" + "math" + + "github.com/c9s/bbgo/pkg/types" +) + +type HeikinAshi struct { + Close *types.Queue + Open *types.Queue + High *types.Queue + Low *types.Queue + Volume *types.Queue +} + +func NewHeikinAshi(size int) *HeikinAshi { + return &HeikinAshi{ + Close: types.NewQueue(size), + Open: types.NewQueue(size), + High: types.NewQueue(size), + Low: types.NewQueue(size), + Volume: types.NewQueue(size), + } +} + +func (s *HeikinAshi) Print() string { + return fmt.Sprintf("Heikin c: %.3f, o: %.3f, h: %.3f, l: %.3f, v: %.3f", + s.Close.Last(), + s.Open.Last(), + s.High.Last(), + s.Low.Last(), + s.Volume.Last()) +} + +func (inc *HeikinAshi) Update(kline types.KLine) { + open := kline.Open.Float64() + cloze := kline.Close.Float64() + high := kline.High.Float64() + low := kline.Low.Float64() + newClose := (open + high + low + cloze) / 4. + newOpen := (inc.Open.Last() + inc.Close.Last()) / 2. + inc.Close.Update(newClose) + inc.Open.Update(newOpen) + inc.High.Update(math.Max(math.Max(high, newOpen), newClose)) + inc.Low.Update(math.Min(math.Min(low, newOpen), newClose)) + inc.Volume.Update(kline.Volume.Float64()) +} diff --git a/pkg/strategy/ewoDgtrd/strategy.go b/pkg/strategy/ewoDgtrd/strategy.go new file mode 100644 index 0000000000..48f5dc3d08 --- /dev/null +++ b/pkg/strategy/ewoDgtrd/strategy.go @@ -0,0 +1,1288 @@ +package ewoDgtrd + +import ( + "context" + "errors" + "fmt" + "math" + "os" + "sync" + + "github.com/fatih/color" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "ewo_dgtrd" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + Position *types.Position `json:"position,omitempty" persistence:"position"` + ProfitStats *types.ProfitStats `json:"profitStats,omitempty" persistence:"profit_stats"` + + Market types.Market + Session *bbgo.ExchangeSession + UseHeikinAshi bool `json:"useHeikinAshi"` // use heikinashi kline + Stoploss fixedpoint.Value `json:"stoploss"` + Symbol string `json:"symbol"` + Interval types.Interval `json:"interval"` + UseEma bool `json:"useEma"` // use exponential ma or not + UseSma bool `json:"useSma"` // if UseEma == false, use simple ma or not + SignalWindow int `json:"sigWin"` // signal window + DisableShortStop bool `json:"disableShortStop"` // disable SL on short + DisableLongStop bool `json:"disableLongStop"` // disable SL on long + FilterHigh float64 `json:"cciStochFilterHigh"` // high filter for CCI Stochastic indicator + FilterLow float64 `json:"cciStochFilterLow"` // low filter for CCI Stochastic indicator + EwoChangeFilterHigh float64 `json:"ewoChangeFilterHigh"` // high filter for ewo histogram + EwoChangeFilterLow float64 `json:"ewoChangeFilterLow"` // low filter for ewo histogram + + Record bool 
`json:"record"` // print record messages on position exit point + + KLineStartTime types.Time + KLineEndTime types.Time + + *bbgo.Environment + *bbgo.Notifiability + *bbgo.Persistence + *bbgo.Graceful + bbgo.StrategyController + + activeMakerOrders *bbgo.ActiveOrderBook + orderStore *bbgo.OrderStore + tradeCollector *bbgo.TradeCollector + entryPrice fixedpoint.Value + waitForTrade bool + + atr *indicator.ATR + emv *indicator.EMV + ccis *CCISTOCH + ma5 types.Series + ma34 types.Series + ewo types.Series + ewoSignal types.Series + ewoHistogram types.Series + ewoChangeRate float64 + heikinAshi *HeikinAshi + peakPrice fixedpoint.Value + bottomPrice fixedpoint.Value + midPrice fixedpoint.Value + lock sync.RWMutex + + buyPrice fixedpoint.Value + sellPrice fixedpoint.Value +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) Initialize() error { + return nil +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + log.Infof("subscribe %s", s.Symbol) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: types.Interval1m}) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) + + session.Subscribe(types.BookTickerChannel, s.Symbol, types.SubscribeOptions{}) +} + +type UpdatableSeries interface { + types.Series + Update(value float64) +} + +// Refer: https://tw.tradingview.com/script/XZyG5SOx-CCI-Stochastic-and-a-quick-lesson-on-Scalping-Trading-Systems/ +type CCISTOCH struct { + cci *indicator.CCI + stoch *indicator.STOCH + ma *indicator.SMA + filterHigh float64 + filterLow float64 +} + +func NewCCISTOCH(i types.Interval, filterHigh, filterLow float64) *CCISTOCH { + cci := &indicator.CCI{IntervalWindow: types.IntervalWindow{Interval: i, Window: 28}} + stoch := &indicator.STOCH{IntervalWindow: types.IntervalWindow{Interval: i, Window: 28}} + ma := &indicator.SMA{IntervalWindow: types.IntervalWindow{Interval: i, Window: 3}} + return &CCISTOCH{ + cci: cci, + stoch: stoch, + ma: ma, + filterHigh: filterHigh, + filterLow: filterLow, + } +} + +func (inc *CCISTOCH) Update(cloze float64) { + inc.cci.Update(cloze) + inc.stoch.Update(inc.cci.Last(), inc.cci.Last(), inc.cci.Last()) + inc.ma.Update(inc.stoch.LastD()) +} + +func (inc *CCISTOCH) BuySignal() bool { + hasGrey := false + for i := 0; i < len(inc.ma.Values); i++ { + v := inc.ma.Index(i) + if v > inc.filterHigh { + return false + } else if v >= inc.filterLow && v <= inc.filterHigh { + hasGrey = true + continue + } else if v < inc.filterLow { + return hasGrey + } + } + return false +} + +func (inc *CCISTOCH) SellSignal() bool { + hasGrey := false + for i := 0; i < len(inc.ma.Values); i++ { + v := inc.ma.Index(i) + if v < inc.filterLow { + return false + } else if v >= inc.filterLow && v <= inc.filterHigh { + hasGrey = true + continue + } else if v > inc.filterHigh { + return hasGrey + } + } + return false +} + +type VWEMA struct { + PV UpdatableSeries + V UpdatableSeries +} + +func (inc *VWEMA) Last() float64 { + return inc.PV.Last() / inc.V.Last() +} + +func (inc *VWEMA) Index(i int) float64 { + if i >= inc.PV.Length() { + return 0 + } + vi := inc.V.Index(i) + if vi == 0 { + return 0 + } + return inc.PV.Index(i) / vi +} + +func (inc *VWEMA) Length() int { + pvl := inc.PV.Length() + vl := inc.V.Length() + if pvl < vl { + return pvl + } + return vl +} + +func (inc *VWEMA) Update(kline types.KLine) { + inc.PV.Update(kline.Close.Mul(kline.Volume).Float64()) + 
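// Sketch (hypothetical, not part of the patch): the VWEMA above is simply
// EMA(price*volume) / EMA(volume). A plain recursive EMA over both series
// reproduces VWEMA.Last() without the indicator package; alpha is the EMA
// smoothing factor, e.g. 2/(window+1).
func vwemaSketch(prices, volumes []float64, alpha float64) float64 {
	var emaPV, emaV float64
	for i := range prices {
		pv := prices[i] * volumes[i]
		if i == 0 {
			emaPV, emaV = pv, volumes[i]
			continue
		}
		emaPV = alpha*pv + (1-alpha)*emaPV
		emaV = alpha*volumes[i] + (1-alpha)*emaV
	}
	if emaV == 0 {
		return 0
	}
	return emaPV / emaV
}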
inc.V.Update(kline.Volume.Float64()) +} + +func (inc *VWEMA) UpdateVal(price float64, vol float64) { + inc.PV.Update(price * vol) + inc.V.Update(vol) +} + +// Setup the Indicators going to be used +func (s *Strategy) SetupIndicators(store *bbgo.MarketDataStore) { + window5 := types.IntervalWindow{Interval: s.Interval, Window: 5} + window34 := types.IntervalWindow{Interval: s.Interval, Window: 34} + s.atr = &indicator.ATR{IntervalWindow: window34} + s.emv = &indicator.EMV{IntervalWindow: types.IntervalWindow{Interval: s.Interval, Window: 14}} + s.ccis = NewCCISTOCH(s.Interval, s.FilterHigh, s.FilterLow) + + getSource := func(window types.KLineWindow) types.Series { + if s.UseHeikinAshi { + return s.heikinAshi.Close + } + return window.Close() + } + getVol := func(window types.KLineWindow) types.Series { + if s.UseHeikinAshi { + return s.heikinAshi.Volume + } + return window.Volume() + } + s.heikinAshi = NewHeikinAshi(500) + store.OnKLineWindowUpdate(func(interval types.Interval, window types.KLineWindow) { + if interval == s.atr.Interval { + if s.atr.RMA == nil { + for _, kline := range window { + high := kline.High.Float64() + low := kline.Low.Float64() + cloze := kline.Close.Float64() + vol := kline.Volume.Float64() + s.atr.Update(high, low, cloze) + s.emv.Update(high, low, vol) + } + } else { + kline := window[len(window)-1] + high := kline.High.Float64() + low := kline.Low.Float64() + cloze := kline.Close.Float64() + vol := kline.Volume.Float64() + s.atr.Update(high, low, cloze) + s.emv.Update(high, low, vol) + } + } + if s.Interval != interval { + return + } + if s.heikinAshi.Close.Length() == 0 { + for _, kline := range window { + s.heikinAshi.Update(kline) + s.ccis.Update(getSource(window).Last()) + } + } else { + s.heikinAshi.Update(window[len(window)-1]) + s.ccis.Update(getSource(window).Last()) + } + }) + if s.UseEma { + ema5 := &indicator.EWMA{IntervalWindow: window5} + ema34 := &indicator.EWMA{IntervalWindow: window34} + store.OnKLineWindowUpdate(func(interval types.Interval, window types.KLineWindow) { + if s.Interval != interval { + return + } + if ema5.Length() == 0 { + closes := types.ToReverseArray(getSource(window)) + for _, cloze := range closes { + ema5.Update(cloze) + ema34.Update(cloze) + } + } else { + cloze := getSource(window).Last() + ema5.Update(cloze) + ema34.Update(cloze) + } + + }) + + s.ma5 = ema5 + s.ma34 = ema34 + } else if s.UseSma { + sma5 := &indicator.SMA{IntervalWindow: window5} + sma34 := &indicator.SMA{IntervalWindow: window34} + store.OnKLineWindowUpdate(func(interval types.Interval, window types.KLineWindow) { + if s.Interval != interval { + return + } + if sma5.Length() == 0 { + closes := types.ToReverseArray(getSource(window)) + for _, cloze := range closes { + sma5.Update(cloze) + sma34.Update(cloze) + } + } else { + cloze := getSource(window).Last() + sma5.Update(cloze) + sma34.Update(cloze) + } + }) + s.ma5 = sma5 + s.ma34 = sma34 + } else { + evwma5 := &VWEMA{ + PV: &indicator.EWMA{IntervalWindow: window5}, + V: &indicator.EWMA{IntervalWindow: window5}, + } + evwma34 := &VWEMA{ + PV: &indicator.EWMA{IntervalWindow: window34}, + V: &indicator.EWMA{IntervalWindow: window34}, + } + store.OnKLineWindowUpdate(func(interval types.Interval, window types.KLineWindow) { + if s.Interval != interval { + return + } + clozes := getSource(window) + vols := getVol(window) + if evwma5.PV.Length() == 0 { + for i := clozes.Length() - 1; i >= 0; i-- { + price := clozes.Index(i) + vol := vols.Index(i) + evwma5.UpdateVal(price, vol) + evwma34.UpdateVal(price, vol) 
+ } + } else { + price := clozes.Last() + vol := vols.Last() + evwma5.UpdateVal(price, vol) + evwma34.UpdateVal(price, vol) + } + }) + s.ma5 = evwma5 + s.ma34 = evwma34 + } + + s.ewo = types.Mul(types.Minus(types.Div(s.ma5, s.ma34), 1.0), 100.) + s.ewoHistogram = types.Minus(s.ma5, s.ma34) + windowSignal := types.IntervalWindow{Interval: s.Interval, Window: s.SignalWindow} + if s.UseEma { + sig := &indicator.EWMA{IntervalWindow: windowSignal} + store.OnKLineWindowUpdate(func(interval types.Interval, _ types.KLineWindow) { + if interval != s.Interval { + return + } + + if sig.Length() == 0 { + // lazy init + ewoVals := types.ToReverseArray(s.ewo) + for _, ewoValue := range ewoVals { + sig.Update(ewoValue) + } + } else { + sig.Update(s.ewo.Last()) + } + }) + s.ewoSignal = sig + } else if s.UseSma { + sig := &indicator.SMA{IntervalWindow: windowSignal} + store.OnKLineWindowUpdate(func(interval types.Interval, _ types.KLineWindow) { + if interval != s.Interval { + return + } + + if sig.Length() == 0 { + // lazy init + ewoVals := types.ToReverseArray(s.ewo) + for _, ewoValue := range ewoVals { + sig.Update(ewoValue) + } + } else { + sig.Update(s.ewo.Last()) + } + }) + s.ewoSignal = sig + } else { + sig := &VWEMA{ + PV: &indicator.EWMA{IntervalWindow: windowSignal}, + V: &indicator.EWMA{IntervalWindow: windowSignal}, + } + store.OnKLineWindowUpdate(func(interval types.Interval, window types.KLineWindow) { + if interval != s.Interval { + return + } + if sig.Length() == 0 { + // lazy init + ewoVals := types.ToReverseArray(s.ewo) + for i, ewoValue := range ewoVals { + vol := window.Volume().Index(i) + sig.PV.Update(ewoValue * vol) + sig.V.Update(vol) + } + } else { + vol := window.Volume().Last() + sig.PV.Update(s.ewo.Last() * vol) + sig.V.Update(vol) + } + }) + s.ewoSignal = sig + } +} + +// Utility to evaluate if the order is valid or not to send to the exchange +func (s *Strategy) validateOrder(order *types.SubmitOrder) error { + if order.Type == types.OrderTypeMarket && order.TimeInForce != "" { + return errors.New("wrong field: market vs TimeInForce") + } + if order.Side == types.SideTypeSell { + baseBalance, ok := s.Session.GetAccount().Balance(s.Market.BaseCurrency) + if !ok { + log.Error("cannot get account") + return errors.New("cannot get account") + } + if order.Quantity.Compare(baseBalance.Available) > 0 { + log.Errorf("qty %v > avail %v", order.Quantity, baseBalance.Available) + return errors.New("qty > avail") + } + price := order.Price + if price.IsZero() { + price, ok = s.Session.LastPrice(s.Symbol) + if !ok { + log.Error("no price") + return errors.New("no price") + } + } + orderAmount := order.Quantity.Mul(price) + if order.Quantity.Sign() <= 0 || + order.Quantity.Compare(s.Market.MinQuantity) < 0 || + orderAmount.Compare(s.Market.MinNotional) < 0 { + log.Debug("amount fail") + return fmt.Errorf("amount fail: quantity: %v, amount: %v", order.Quantity, orderAmount) + } + return nil + } else if order.Side == types.SideTypeBuy { + quoteBalance, ok := s.Session.GetAccount().Balance(s.Market.QuoteCurrency) + if !ok { + log.Error("cannot get account") + return errors.New("cannot get account") + } + price := order.Price + if price.IsZero() { + price, ok = s.Session.LastPrice(s.Symbol) + if !ok { + log.Error("no price") + return errors.New("no price") + } + } + totalQuantity := quoteBalance.Available.Div(price) + if order.Quantity.Compare(totalQuantity) > 0 { + log.Errorf("qty %v > avail %v", order.Quantity, totalQuantity) + return errors.New("qty > avail") + } + orderAmount := 
order.Quantity.Mul(price) + if order.Quantity.Sign() <= 0 || + orderAmount.Compare(s.Market.MinNotional) < 0 || + order.Quantity.Compare(s.Market.MinQuantity) < 0 { + log.Debug("amount fail") + return fmt.Errorf("amount fail: quantity: %v, amount: %v", order.Quantity, orderAmount) + } + return nil + } + log.Error("side error") + return errors.New("side error") + +} + +func (s *Strategy) PlaceBuyOrder(ctx context.Context, price fixedpoint.Value) (*types.Order, *types.Order) { + var closeOrder *types.Order + var ok bool + waitForTrade := false + base := s.Position.GetBase() + if base.Abs().Compare(s.Market.MinQuantity) >= 0 && base.Mul(s.GetLastPrice()).Abs().Compare(s.Market.MinNotional) >= 0 && base.Sign() < 0 { + if closeOrder, ok = s.ClosePosition(ctx); !ok { + log.Errorf("sell position %v remained not closed, skip placing order", base) + return closeOrder, nil + } + } + if s.Position.GetBase().Sign() < 0 { + // we are not able to make close trade at this moment, + // will close the rest of the position by normal limit order + // s.entryPrice is set in the last trade + waitForTrade = true + } + quoteBalance, ok := s.Session.GetAccount().Balance(s.Market.QuoteCurrency) + if !ok { + log.Infof("buy order at price %v failed", price) + return closeOrder, nil + } + quantityAmount := quoteBalance.Available + totalQuantity := quantityAmount.Div(price) + order := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeLimit, + Price: price, + Quantity: totalQuantity, + Market: s.Market, + TimeInForce: types.TimeInForceGTC, + } + if err := s.validateOrder(&order); err != nil { + log.Infof("validation failed %v: %v", order, err) + return closeOrder, nil + } + log.Warnf("long at %v, position %v, closeOrder %v, timestamp: %s", price, s.Position.GetBase(), closeOrder, s.KLineStartTime) + createdOrders, err := s.Session.Exchange.SubmitOrders(ctx, order) + if err != nil { + log.WithError(err).Errorf("cannot place order") + return closeOrder, nil + } + + log.Infof("post order c: %v, entryPrice: %v o: %v", waitForTrade, s.entryPrice, createdOrders) + s.waitForTrade = waitForTrade + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) 
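// Sketch (hypothetical helper, not part of the patch) of the exchange-minimum
// rule enforced by validateOrder above: an order is acceptable only when its
// quantity is positive, at least the market MinQuantity, and its notional
// value (quantity * price) reaches MinNotional. Plain float64 for brevity.
func passesExchangeMinimums(quantity, price, minQuantity, minNotional float64) bool {
	if quantity <= 0 {
		return false
	}
	return quantity >= minQuantity && quantity*price >= minNotional
}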
+ s.tradeCollector.Process() + return closeOrder, &createdOrders[0] +} + +func (s *Strategy) PlaceSellOrder(ctx context.Context, price fixedpoint.Value) (*types.Order, *types.Order) { + var closeOrder *types.Order + var ok bool + waitForTrade := false + base := s.Position.GetBase() + if base.Abs().Compare(s.Market.MinQuantity) >= 0 && base.Abs().Mul(s.GetLastPrice()).Compare(s.Market.MinNotional) >= 0 && base.Sign() > 0 { + if closeOrder, ok = s.ClosePosition(ctx); !ok { + log.Errorf("buy position %v remained not closed, skip placing order", base) + return closeOrder, nil + } + } + if s.Position.GetBase().Sign() > 0 { + // we are not able to make close trade at this moment, + // will close the rest of the position by normal limit order + // s.entryPrice is set in the last trade + waitForTrade = true + } + baseBalance, ok := s.Session.GetAccount().Balance(s.Market.BaseCurrency) + if !ok { + return closeOrder, nil + } + order := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeLimit, + Market: s.Market, + Quantity: baseBalance.Available, + Price: price, + TimeInForce: types.TimeInForceGTC, + } + if err := s.validateOrder(&order); err != nil { + log.Infof("validation failed %v: %v", order, err) + return closeOrder, nil + } + + log.Warnf("short at %v, position %v closeOrder %v, timestamp: %s", price, s.Position.GetBase(), closeOrder, s.KLineStartTime) + createdOrders, err := s.Session.Exchange.SubmitOrders(ctx, order) + if err != nil { + log.WithError(err).Errorf("cannot place order") + return closeOrder, nil + } + log.Infof("post order, c: %v, entryPrice: %v o: %v", waitForTrade, s.entryPrice, createdOrders) + s.waitForTrade = waitForTrade + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + s.tradeCollector.Process() + return closeOrder, &createdOrders[0] +} + +// ClosePosition(context.Context) -> (closeOrder *types.Order, ok bool) +// this will decorate the generated order from NewMarketCloseOrder +// add do necessary checks +// if available quantity is zero, will return (nil, true) +// if any of the checks failed, will return (nil, false) +// otherwise, return the created close order and true +func (s *Strategy) ClosePosition(ctx context.Context) (*types.Order, bool) { + order := s.Position.NewMarketCloseOrder(fixedpoint.One) + // no position exists + if order == nil { + // no base + s.sellPrice = fixedpoint.Zero + s.buyPrice = fixedpoint.Zero + return nil, true + } + order.TimeInForce = "" + // If there's any order not yet been traded in the orderbook, + // we need this additional check to make sure we have enough balance to post a close order + balances := s.Session.GetAccount().Balances() + baseBalance := balances[s.Market.BaseCurrency].Available + if order.Side == types.SideTypeBuy { + price := s.GetLastPrice() + quoteAmount := balances[s.Market.QuoteCurrency].Available.Div(price) + if order.Quantity.Compare(quoteAmount) > 0 { + order.Quantity = quoteAmount + } + } else if order.Side == types.SideTypeSell && order.Quantity.Compare(baseBalance) > 0 { + order.Quantity = baseBalance + } + // if no available balance... 
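// Sketch (assumed helper, not part of the patch) of the balance cap applied a
// few lines above in ClosePosition: a closing buy is limited by the available
// quote balance converted at the last price, a closing sell by the available
// base balance.
func capCloseQuantity(side types.SideType, quantity, baseAvail, quoteAvail, lastPrice fixedpoint.Value) fixedpoint.Value {
	if side == types.SideTypeBuy {
		limit := quoteAvail.Div(lastPrice)
		if quantity.Compare(limit) > 0 {
			return limit
		}
		return quantity
	}
	if quantity.Compare(baseAvail) > 0 {
		return baseAvail
	}
	return quantity
}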
+ if order.Quantity.IsZero() { + return nil, true + } + if err := s.validateOrder(order); err != nil { + log.Errorf("cannot place close order %v: %v", order, err) + return nil, false + } + + createdOrders, err := s.Session.Exchange.SubmitOrders(ctx, *order) + if err != nil { + log.WithError(err).Errorf("cannot place close order") + return nil, false + } + log.Infof("close order %v", createdOrders) + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + s.tradeCollector.Process() + return &createdOrders[0], true +} + +func (s *Strategy) CancelAll(ctx context.Context) { + var toCancel []types.Order + for _, order := range s.orderStore.Orders() { + if order.Status == types.OrderStatusNew || order.Status == types.OrderStatusPartiallyFilled { + toCancel = append(toCancel, order) + } + } + if len(toCancel) > 0 { + if err := s.Session.Exchange.CancelOrders(ctx, toCancel...); err != nil { + log.WithError(err).Errorf("cancel order error") + } + s.waitForTrade = false + } +} + +func (s *Strategy) GetLastPrice() fixedpoint.Value { + var lastPrice fixedpoint.Value + var ok bool + if s.Environment.IsBackTesting() { + lastPrice, ok = s.Session.LastPrice(s.Symbol) + if !ok { + log.Errorf("cannot get last price") + return lastPrice + } + } else { + s.lock.RLock() + if s.midPrice.IsZero() { + lastPrice, ok = s.Session.LastPrice(s.Symbol) + if !ok { + log.Errorf("cannot get last price") + return lastPrice + } + } else { + lastPrice = s.midPrice + } + s.lock.RUnlock() + } + return lastPrice +} + +// Trading Rules: +// - buy / sell the whole asset +// - SL by atr (lastprice < buyprice - atr) || (lastprice > sellprice + atr) +// - TP by detecting if there's a ewo pivotHigh(1,1) -> close long, or pivotLow(1,1) -> close short +// - TP by ma34 +- atr * 2 +// - TP by (lastprice < peak price - atr) || (lastprice > bottom price + atr) +// - SL by s.Stoploss (Abs(price_diff / price) > s.Stoploss) +// - entry condition on ewo(Elliott wave oscillator) Crosses ewoSignal(ma on ewo, signalWindow) +// * buy signal on (crossover on previous K bar and no crossunder on latest K bar) +// * sell signal on (crossunder on previous K bar and no crossunder on latest K bar) +// - and filtered by the following rules: +// * buy: buy signal ON, kline Close > Open, Close > ma5, Close > ma34, CCI Stochastic Buy signal +// * sell: sell signal ON, kline Close < Open, Close < ma5, Close < ma34, CCI Stochastic Sell signal +// - or entry when ma34 +- atr * 3 gets touched +// - entry price: latestPrice +- atr / 2 (short,long), close at market price +// Cancel non-fully filled orders on new signal (either in same direction or not) +// +// ps: kline might refer to heikinashi or normal ohlc +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + s.buyPrice = fixedpoint.Zero + s.sellPrice = fixedpoint.Zero + s.peakPrice = fixedpoint.Zero + s.bottomPrice = fixedpoint.Zero + + counterTPfromPeak := 0 + percentAvgTPfromPeak := 0.0 + counterTPfromCCI := 0 + percentAvgTPfromCCI := 0.0 + counterTPfromLongShort := 0 + percentAvgTPfromLongShort := 0.0 + counterTPfromAtr := 0 + percentAvgTPfromAtr := 0.0 + counterTPfromOrder := 0 + percentAvgTPfromOrder := 0.0 + counterSLfromSL := 0 + percentAvgSLfromSL := 0.0 + counterSLfromOrder := 0 + percentAvgSLfromOrder := 0.0 + + s.activeMakerOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeMakerOrders.BindStream(session.UserDataStream) + + s.orderStore = bbgo.NewOrderStore(s.Symbol) + 
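// Sketch (hypothetical, not part of the patch) of the entry oscillator from
// the trading-rules comment above: EWO = (ma5/ma34 - 1) * 100, and a long
// setup wants a cross over the signal line on the previous bar that the
// latest bar does not immediately undo. crossedOver uses a common crossover
// definition; the exact types.CrossOver semantics used later may differ.
func ewoValue(ma5, ma34 float64) float64 {
	return (ma5/ma34 - 1.0) * 100.0
}

func crossedOver(prevA, lastA, prevB, lastB float64) bool {
	return prevA <= prevB && lastA > lastB
}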
s.orderStore.BindStream(session.UserDataStream) + + if s.Position == nil { + s.Position = types.NewPositionFromMarket(s.Market) + } + if s.ProfitStats == nil { + s.ProfitStats = types.NewProfitStats(s.Market) + } + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.Position, s.orderStore) + s.tradeCollector.OnTrade(func(trade types.Trade, profit, netprofit fixedpoint.Value) { + if s.Symbol != trade.Symbol { + return + } + bbgo.Notify(trade) + s.ProfitStats.AddTrade(trade) + + if !profit.IsZero() { + log.Warnf("generate profit: %v, netprofit: %v, trade: %v", profit, netprofit, trade) + p := s.Position.NewProfit(trade, profit, netprofit) + p.Strategy = ID + p.StrategyInstanceID = s.InstanceID() + bbgo.Notify(&p) + + s.ProfitStats.AddProfit(p) + bbgo.Notify(&s.ProfitStats) + s.Environment.RecordPosition(s.Position, trade, &p) + } else { + s.Environment.RecordPosition(s.Position, trade, nil) + } + // calculate report for the position that cannot be closed by close order (amount too small) + if s.waitForTrade { + price := s.entryPrice + if price.IsZero() { + panic("no price found") + } + pnlRate := trade.Price.Sub(price).Abs().Div(trade.Price).Float64() + if s.Record { + log.Errorf("record avg %v trade %v", price, trade) + } + if trade.Side == types.SideTypeBuy { + if trade.Price.Compare(price) < 0 { + percentAvgTPfromOrder = percentAvgTPfromOrder*float64(counterTPfromOrder) + pnlRate + counterTPfromOrder += 1 + percentAvgTPfromOrder /= float64(counterTPfromOrder) + } else { + percentAvgSLfromOrder = percentAvgSLfromOrder*float64(counterSLfromOrder) + pnlRate + counterSLfromOrder += 1 + percentAvgSLfromOrder /= float64(counterSLfromOrder) + } + } else if trade.Side == types.SideTypeSell { + if trade.Price.Compare(price) > 0 { + percentAvgTPfromOrder = percentAvgTPfromOrder*float64(counterTPfromOrder) + pnlRate + counterTPfromOrder += 1 + percentAvgTPfromOrder /= float64(counterTPfromOrder) + } else { + percentAvgSLfromOrder = percentAvgSLfromOrder*float64(counterSLfromOrder) + pnlRate + counterSLfromOrder += 1 + percentAvgSLfromOrder /= float64(counterSLfromOrder) + } + } else { + panic(fmt.Sprintf("no sell(%v) or buy price(%v), %v", s.sellPrice, s.buyPrice, trade)) + } + s.waitForTrade = false + } + if s.Position.GetBase().Abs().Compare(s.Market.MinQuantity) >= 0 && s.Position.GetBase().Abs().Mul(trade.Price).Compare(s.Market.MinNotional) >= 0 { + sign := s.Position.GetBase().Sign() + if sign > 0 { + log.Infof("base become positive, %v", trade) + s.buyPrice = s.Position.AverageCost + s.sellPrice = fixedpoint.Zero + s.peakPrice = s.Position.AverageCost + } else if sign == 0 { + panic("not going to happen") + } else { + log.Infof("base become negative, %v", trade) + s.buyPrice = fixedpoint.Zero + s.sellPrice = s.Position.AverageCost + s.bottomPrice = s.Position.AverageCost + } + s.entryPrice = trade.Price + } else { + log.Infof("base become zero, rest of base: %v", s.Position.GetBase()) + if s.Position.GetBase().IsZero() { + s.entryPrice = fixedpoint.Zero + } + s.buyPrice = fixedpoint.Zero + s.sellPrice = fixedpoint.Zero + s.peakPrice = fixedpoint.Zero + s.bottomPrice = fixedpoint.Zero + } + }) + + s.tradeCollector.OnPositionUpdate(func(position *types.Position) { + log.Infof("position changed: %s", position) + bbgo.Notify(s.Position) + }) + s.tradeCollector.BindStream(session.UserDataStream) + + store, ok := s.Session.MarketDataStore(s.Symbol) + if !ok { + return fmt.Errorf("cannot get marketdatastore of %s", s.Symbol) + } + s.SetupIndicators(store) + + // local peak of ewo + shortSig := 
s.ewo.Last() < s.ewo.Index(1) && s.ewo.Index(1) > s.ewo.Index(2) + longSig := s.ewo.Last() > s.ewo.Index(1) && s.ewo.Index(1) < s.ewo.Index(2) + + sellOrderTPSL := func(price fixedpoint.Value) { + lastPrice := s.GetLastPrice() + base := s.Position.GetBase().Abs() + if base.Mul(lastPrice).Compare(s.Market.MinNotional) < 0 || base.Compare(s.Market.MinQuantity) < 0 { + return + } + if s.sellPrice.IsZero() { + return + } + balances := session.GetAccount().Balances() + quoteBalance := balances[s.Market.QuoteCurrency].Available + atr := fixedpoint.NewFromFloat(s.atr.Last()) + atrx2 := fixedpoint.NewFromFloat(s.atr.Last() * 2) + buyall := false + if s.bottomPrice.IsZero() || s.bottomPrice.Compare(price) > 0 { + s.bottomPrice = price + } + takeProfit := false + bottomBack := s.bottomPrice + spBack := s.sellPrice + reason := -1 + if quoteBalance.Div(lastPrice).Compare(s.Market.MinQuantity) >= 0 && quoteBalance.Compare(s.Market.MinNotional) >= 0 { + base := fixedpoint.NewFromFloat(s.ma34.Last()) + // TP + if lastPrice.Compare(s.sellPrice) < 0 && (longSig || + (!atrx2.IsZero() && base.Sub(atrx2).Compare(lastPrice) >= 0)) { + buyall = true + takeProfit = true + + // calculate report + if longSig { + reason = 1 + } else { + reason = 2 + } + + } + if !atr.IsZero() && s.bottomPrice.Add(atr).Compare(lastPrice) <= 0 && + lastPrice.Compare(s.sellPrice) < 0 { + buyall = true + takeProfit = true + reason = 3 + } + + // SL + /*if (!atrx2.IsZero() && s.bottomPrice.Add(atrx2).Compare(lastPrice) <= 0) || + lastPrice.Sub(s.bottomPrice).Div(lastPrice).Compare(s.Stoploss) > 0 { + if lastPrice.Compare(s.sellPrice) < 0 { + takeProfit = true + } + buyall = true + s.bottomPrice = fixedpoint.Zero + }*/ + if !s.DisableShortStop && ((!atr.IsZero() && s.sellPrice.Sub(atr).Compare(lastPrice) >= 0) || + lastPrice.Sub(s.sellPrice).Div(s.sellPrice).Compare(s.Stoploss) > 0) { + buyall = true + reason = 4 + } + } + if buyall { + log.Warnf("buyall TPSL %v %v", s.Position.GetBase(), quoteBalance) + p := s.sellPrice + if order, ok := s.ClosePosition(ctx); order != nil && ok { + if takeProfit { + log.Errorf("takeprofit buy at %v, avg %v, l: %v, atrx2: %v", lastPrice, spBack, bottomBack, atrx2) + } else { + log.Errorf("stoploss buy at %v, avg %v, l: %v, atrx2: %v", lastPrice, spBack, bottomBack, atrx2) + } + + // calculate report + if s.Record { + log.Error("record ba") + } + var pnlRate float64 + if takeProfit { + pnlRate = p.Sub(lastPrice).Div(lastPrice).Float64() + } else { + pnlRate = lastPrice.Sub(p).Div(lastPrice).Float64() + } + switch reason { + case 0: + percentAvgTPfromCCI = percentAvgTPfromCCI*float64(counterTPfromCCI) + pnlRate + counterTPfromCCI += 1 + percentAvgTPfromCCI /= float64(counterTPfromCCI) + case 1: + percentAvgTPfromLongShort = percentAvgTPfromLongShort*float64(counterTPfromLongShort) + pnlRate + counterTPfromLongShort += 1 + percentAvgTPfromLongShort /= float64(counterTPfromLongShort) + case 2: + percentAvgTPfromAtr = percentAvgTPfromAtr*float64(counterTPfromAtr) + pnlRate + counterTPfromAtr += 1 + percentAvgTPfromAtr /= float64(counterTPfromAtr) + case 3: + percentAvgTPfromPeak = percentAvgTPfromPeak*float64(counterTPfromPeak) + pnlRate + counterTPfromPeak += 1 + percentAvgTPfromPeak /= float64(counterTPfromPeak) + case 4: + percentAvgSLfromSL = percentAvgSLfromSL*float64(counterSLfromSL) + pnlRate + counterSLfromSL += 1 + percentAvgSLfromSL /= float64(counterSLfromSL) + + } + } + } + } + buyOrderTPSL := func(price fixedpoint.Value) { + lastPrice := s.GetLastPrice() + base := s.Position.GetBase().Abs() + if 
base.Mul(lastPrice).Compare(s.Market.MinNotional) < 0 || base.Compare(s.Market.MinQuantity) < 0 { + return + } + if s.buyPrice.IsZero() { + return + } + balances := session.GetAccount().Balances() + baseBalance := balances[s.Market.BaseCurrency].Available + atr := fixedpoint.NewFromFloat(s.atr.Last()) + atrx2 := fixedpoint.NewFromFloat(s.atr.Last() * 2) + sellall := false + if s.peakPrice.IsZero() || s.peakPrice.Compare(price) < 0 { + s.peakPrice = price + } + takeProfit := false + peakBack := s.peakPrice + bpBack := s.buyPrice + reason := -1 + if baseBalance.Compare(s.Market.MinQuantity) >= 0 && baseBalance.Mul(lastPrice).Compare(s.Market.MinNotional) >= 0 { + // TP + base := fixedpoint.NewFromFloat(s.ma34.Last()) + if lastPrice.Compare(s.buyPrice) > 0 && (shortSig || + (!atrx2.IsZero() && base.Add(atrx2).Compare(lastPrice) <= 0)) { + sellall = true + takeProfit = true + + // calculate report + if shortSig { + reason = 1 + } else { + reason = 2 + } + } + if !atr.IsZero() && s.peakPrice.Sub(atr).Compare(lastPrice) >= 0 && + lastPrice.Compare(s.buyPrice) > 0 { + sellall = true + takeProfit = true + reason = 3 + } + + // SL + /*if s.peakPrice.Sub(lastPrice).Div(s.peakPrice).Compare(s.Stoploss) > 0 || + (!atrx2.IsZero() && s.peakPrice.Sub(atrx2).Compare(lastPrice) >= 0) { + if lastPrice.Compare(s.buyPrice) > 0 { + takeProfit = true + } + sellall = true + s.peakPrice = fixedpoint.Zero + }*/ + if !s.DisableLongStop && (s.buyPrice.Sub(lastPrice).Div(s.buyPrice).Compare(s.Stoploss) > 0 || + (!atr.IsZero() && s.buyPrice.Sub(atr).Compare(lastPrice) >= 0)) { + sellall = true + reason = 4 + } + } + + if sellall { + log.Warnf("sellall TPSL %v", s.Position.GetBase()) + p := s.buyPrice + if order, ok := s.ClosePosition(ctx); order != nil && ok { + if takeProfit { + log.Errorf("takeprofit sell at %v, avg %v, h: %v, atrx2: %v", lastPrice, bpBack, peakBack, atrx2) + } else { + log.Errorf("stoploss sell at %v, avg %v, h: %v, atrx2: %v", lastPrice, bpBack, peakBack, atrx2) + } + // calculate report + if s.Record { + log.Error("record sa") + } + var pnlRate float64 + if takeProfit { + pnlRate = lastPrice.Sub(p).Div(p).Float64() + } else { + pnlRate = p.Sub(lastPrice).Div(p).Float64() + } + switch reason { + case 0: + percentAvgTPfromCCI = percentAvgTPfromCCI*float64(counterTPfromCCI) + pnlRate + counterTPfromCCI += 1 + percentAvgTPfromCCI /= float64(counterTPfromCCI) + case 1: + percentAvgTPfromLongShort = percentAvgTPfromLongShort*float64(counterTPfromLongShort) + pnlRate + counterTPfromLongShort += 1 + percentAvgTPfromLongShort /= float64(counterTPfromLongShort) + case 2: + percentAvgTPfromAtr = percentAvgTPfromAtr*float64(counterTPfromAtr) + pnlRate + counterTPfromAtr += 1 + percentAvgTPfromAtr /= float64(counterTPfromAtr) + case 3: + percentAvgTPfromPeak = percentAvgTPfromPeak*float64(counterTPfromPeak) + pnlRate + counterTPfromPeak += 1 + percentAvgTPfromPeak /= float64(counterTPfromPeak) + case 4: + percentAvgSLfromSL = percentAvgSLfromSL*float64(counterSLfromSL) + pnlRate + counterSLfromSL += 1 + percentAvgSLfromSL /= float64(counterSLfromSL) + } + } + } + } + + // set last price by realtime book ticker update + // to trigger TP/SL + session.MarketDataStream.OnBookTickerUpdate(func(ticker types.BookTicker) { + if s.Environment.IsBackTesting() { + return + } + bestBid := ticker.Buy + bestAsk := ticker.Sell + var midPrice fixedpoint.Value + + if tryLock(&s.lock) { + if !bestAsk.IsZero() && !bestBid.IsZero() { + s.midPrice = bestAsk.Add(bestBid).Div(types.Two) + } else if !bestAsk.IsZero() { + s.midPrice 
= bestAsk + } else { + s.midPrice = bestBid + } + midPrice = s.midPrice + s.lock.Unlock() + } + + if !midPrice.IsZero() { + buyOrderTPSL(midPrice) + sellOrderTPSL(midPrice) + // log.Debugf("best bid %v, best ask %v, mid %v", bestBid, bestAsk, midPrice) + } + }) + + getHigh := func(window types.KLineWindow) types.Series { + if s.UseHeikinAshi { + return s.heikinAshi.High + } + return window.High() + } + getLow := func(window types.KLineWindow) types.Series { + if s.UseHeikinAshi { + return s.heikinAshi.Low + } + return window.Low() + } + getClose := func(window types.KLineWindow) types.Series { + if s.UseHeikinAshi { + return s.heikinAshi.Close + } + return window.Close() + } + getOpen := func(window types.KLineWindow) types.Series { + if s.UseHeikinAshi { + return s.heikinAshi.Open + } + return window.Open() + } + + store.OnKLineWindowUpdate(func(interval types.Interval, window types.KLineWindow) { + kline := window[len(window)-1] + s.KLineStartTime = kline.StartTime + s.KLineEndTime = kline.EndTime + + // well, only track prices on 1m + if interval == types.Interval1m { + + if s.Environment.IsBackTesting() { + buyOrderTPSL(kline.High) + sellOrderTPSL(kline.Low) + + } + } + + var lastPrice fixedpoint.Value + var ok bool + if s.Environment.IsBackTesting() { + lastPrice, ok = session.LastPrice(s.Symbol) + if !ok { + log.Errorf("cannot get last price") + return + } + } else { + s.lock.RLock() + if s.midPrice.IsZero() { + lastPrice, ok = session.LastPrice(s.Symbol) + if !ok { + log.Errorf("cannot get last price") + return + } + } else { + lastPrice = s.midPrice + } + s.lock.RUnlock() + } + balances := session.GetAccount().Balances() + baseBalance := balances[s.Market.BaseCurrency].Total() + quoteBalance := balances[s.Market.QuoteCurrency].Total() + atr := fixedpoint.NewFromFloat(s.atr.Last()) + if !s.Environment.IsBackTesting() { + log.Infof("Get last price: %v, ewo %f, ewoSig %f, ccis: %f, atr %v, kline: %v, balance[base]: %v balance[quote]: %v", + lastPrice, s.ewo.Last(), s.ewoSignal.Last(), s.ccis.ma.Last(), atr, kline, baseBalance, quoteBalance) + } + + if kline.Interval != s.Interval { + return + } + + priceHighest := types.Highest(getHigh(window), 233) + priceLowest := types.Lowest(getLow(window), 233) + priceChangeRate := (priceHighest - priceLowest) / priceHighest / 14 + ewoHighest := types.Highest(s.ewoHistogram, 233) + + s.ewoChangeRate = math.Abs(s.ewoHistogram.Last() / ewoHighest * priceChangeRate) + + longSignal := types.CrossOver(s.ewo, s.ewoSignal) + shortSignal := types.CrossUnder(s.ewo, s.ewoSignal) + + base := s.ma34.Last() + sellLine := base + s.atr.Last()*3 + buyLine := base - s.atr.Last()*3 + clozes := getClose(window) + opens := getOpen(window) + + // get trend flags + bull := clozes.Last() > opens.Last() + breakThrough := clozes.Last() > s.ma5.Last() && clozes.Last() > s.ma34.Last() + breakDown := clozes.Last() < s.ma5.Last() && clozes.Last() < s.ma34.Last() + + // kline breakthrough ma5, ma34 trend up, and cci Stochastic bull + IsBull := bull && breakThrough && s.ccis.BuySignal() && s.ewoChangeRate < s.EwoChangeFilterHigh && s.ewoChangeRate > s.EwoChangeFilterLow + // kline downthrough ma5, ma34 trend down, and cci Stochastic bear + IsBear := !bull && breakDown && s.ccis.SellSignal() && s.ewoChangeRate < s.EwoChangeFilterHigh && s.ewoChangeRate > s.EwoChangeFilterLow + + if !s.Environment.IsBackTesting() { + log.Infof("IsBull: %v, bull: %v, longSignal[1]: %v, shortSignal: %v, lastPrice: %v", + IsBull, bull, longSignal.Index(1), shortSignal.Last(), lastPrice) + 
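// Sketch (hypothetical helper, not part of the patch) of the entry filter
// computed above: the 233-bar price range relative to its high, scaled by
// 1/14, normalizes the latest MA spread (ewo histogram) against its own
// 233-bar high. Entries are only taken while this value stays inside the
// configured ewoChangeFilterLow/ewoChangeFilterHigh band.
func ewoChangeRateSketch(histLast, histHighest, priceHighest, priceLowest float64) float64 {
	priceChangeRate := (priceHighest - priceLowest) / priceHighest / 14
	return math.Abs(histLast / histHighest * priceChangeRate)
}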
log.Infof("IsBear: %v, bear: %v, shortSignal[1]: %v, longSignal: %v, lastPrice: %v", + IsBear, !bull, shortSignal.Index(1), longSignal.Last(), lastPrice) + } + + if (longSignal.Index(1) && !shortSignal.Last() && IsBull) || lastPrice.Float64() <= buyLine { + price := lastPrice.Sub(atr.Div(types.Two)) + // if total asset (including locked) could be used to buy + if quoteBalance.Div(price).Compare(s.Market.MinQuantity) >= 0 && quoteBalance.Compare(s.Market.MinNotional) >= 0 { + // cancel all orders to release lock + s.CancelAll(ctx) + + // backup, since the s.sellPrice will be cleared when doing ClosePosition + sellPrice := s.sellPrice + log.Errorf("ewoChangeRate %v, emv %v", s.ewoChangeRate, s.emv.Last()) + + // calculate report + if closeOrder, _ := s.PlaceBuyOrder(ctx, price); closeOrder != nil { + if s.Record { + log.Error("record l") + } + if !sellPrice.IsZero() { + if lastPrice.Compare(sellPrice) > 0 { + pnlRate := lastPrice.Sub(sellPrice).Div(lastPrice).Float64() + percentAvgTPfromOrder = percentAvgTPfromOrder*float64(counterTPfromOrder) + pnlRate + counterTPfromOrder += 1 + percentAvgTPfromOrder /= float64(counterTPfromOrder) + } else { + pnlRate := sellPrice.Sub(lastPrice).Div(lastPrice).Float64() + percentAvgSLfromOrder = percentAvgSLfromOrder*float64(counterSLfromOrder) + pnlRate + counterSLfromOrder += 1 + percentAvgSLfromOrder /= float64(counterSLfromOrder) + } + } else { + panic("no sell price") + } + } + } + } + if (shortSignal.Index(1) && !longSignal.Last() && IsBear) || lastPrice.Float64() >= sellLine { + price := lastPrice.Add(atr.Div(types.Two)) + // if total asset (including locked) could be used to sell + if baseBalance.Mul(price).Compare(s.Market.MinNotional) >= 0 && baseBalance.Compare(s.Market.MinQuantity) >= 0 { + // cancel all orders to release lock + s.CancelAll(ctx) + + // backup, since the s.buyPrice will be cleared when doing ClosePosition + buyPrice := s.buyPrice + log.Errorf("ewoChangeRate: %v, emv %v", s.ewoChangeRate, s.emv.Last()) + + // calculate report + if closeOrder, _ := s.PlaceSellOrder(ctx, price); closeOrder != nil { + if s.Record { + log.Error("record s") + } + if !buyPrice.IsZero() { + if lastPrice.Compare(buyPrice) > 0 { + pnlRate := lastPrice.Sub(buyPrice).Div(buyPrice).Float64() + percentAvgTPfromOrder = percentAvgTPfromOrder*float64(counterTPfromOrder) + pnlRate + counterTPfromOrder += 1 + percentAvgTPfromOrder /= float64(counterTPfromOrder) + } else { + pnlRate := buyPrice.Sub(lastPrice).Div(buyPrice).Float64() + percentAvgSLfromOrder = percentAvgSLfromOrder*float64(counterSLfromOrder) + pnlRate + counterSLfromOrder += 1 + percentAvgSLfromOrder /= float64(counterSLfromOrder) + } + } else { + panic("no buy price") + } + } + } + } + }) + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + log.Infof("canceling active orders...") + s.CancelAll(ctx) + + s.tradeCollector.Process() + hiblue := color.New(color.FgHiBlue).FprintfFunc() + blue := color.New(color.FgBlue).FprintfFunc() + hiyellow := color.New(color.FgHiYellow).FprintfFunc() + hiblue(os.Stderr, "---- Trade Report (Without Fee) ----\n") + hiblue(os.Stderr, "TP:\n") + blue(os.Stderr, "\tpeak / bottom with atr: %d, avg pnl rate: %f\n", counterTPfromPeak, percentAvgTPfromPeak) + blue(os.Stderr, "\tCCI Stochastic: %d, avg pnl rate: %f\n", counterTPfromCCI, percentAvgTPfromCCI) + blue(os.Stderr, "\tLongSignal/ShortSignal: %d, avg pnl rate: %f\n", counterTPfromLongShort, percentAvgTPfromLongShort) + blue(os.Stderr, "\tma34 and Atrx2: %d, avg pnl rate: %f\n", 
counterTPfromAtr, percentAvgTPfromAtr) + blue(os.Stderr, "\tActive Order: %d, avg pnl rate: %f\n", counterTPfromOrder, percentAvgTPfromOrder) + + totalTP := counterTPfromPeak + counterTPfromCCI + counterTPfromLongShort + counterTPfromAtr + counterTPfromOrder + avgProfit := (float64(counterTPfromPeak)*percentAvgTPfromPeak + + float64(counterTPfromCCI)*percentAvgTPfromCCI + + float64(counterTPfromLongShort)*percentAvgTPfromLongShort + + float64(counterTPfromAtr)*percentAvgTPfromAtr + + float64(counterTPfromOrder)*percentAvgTPfromOrder) / float64(totalTP) + hiblue(os.Stderr, "\tSum: %d, avg pnl rate: %f\n", totalTP, avgProfit) + + hiblue(os.Stderr, "SL:\n") + blue(os.Stderr, "\tentry SL: %d, avg pnl rate: -%f\n", counterSLfromSL, percentAvgSLfromSL) + blue(os.Stderr, "\tActive Order: %d, avg pnl rate: -%f\n", counterSLfromOrder, percentAvgSLfromOrder) + + totalSL := counterSLfromSL + counterSLfromOrder + avgLoss := (float64(counterSLfromSL)*percentAvgSLfromSL + float64(counterSLfromOrder)*percentAvgSLfromOrder) / float64(totalSL) + hiblue(os.Stderr, "\tSum: %d, avg pnl rate: -%f\n", totalSL, avgLoss) + + hiblue(os.Stderr, "WinRate: %f\n", float64(totalTP)/float64(totalTP+totalSL)) + + maString := "vwema" + if s.UseSma { + maString = "sma" + } + if s.UseEma { + maString = "ema" + } + + hiyellow(os.Stderr, "----- EWO Settings -------\n") + hiyellow(os.Stderr, "General:\n") + hiyellow(os.Stderr, "\tuseHeikinAshi: %v\n", s.UseHeikinAshi) + hiyellow(os.Stderr, "\tstoploss: %v\n", s.Stoploss) + hiyellow(os.Stderr, "\tsymbol: %s\n", s.Symbol) + hiyellow(os.Stderr, "\tinterval: %s\n", s.Interval) + hiyellow(os.Stderr, "\tMA type: %s\n", maString) + hiyellow(os.Stderr, "\tdisableShortStop: %v\n", s.DisableShortStop) + hiyellow(os.Stderr, "\tdisableLongStop: %v\n", s.DisableLongStop) + hiyellow(os.Stderr, "\trecord: %v\n", s.Record) + hiyellow(os.Stderr, "CCI Stochastic:\n") + hiyellow(os.Stderr, "\tccistochFilterHigh: %f\n", s.FilterHigh) + hiyellow(os.Stderr, "\tccistochFilterLow: %f\n", s.FilterLow) + hiyellow(os.Stderr, "Ewo && Ewo Histogram:\n") + hiyellow(os.Stderr, "\tsigWin: %d\n", s.SignalWindow) + hiyellow(os.Stderr, "\tewoChngFilterHigh: %f\n", s.EwoChangeFilterHigh) + hiyellow(os.Stderr, "\tewoChngFilterLow: %f\n", s.EwoChangeFilterLow) + }) + return nil +} diff --git a/pkg/strategy/ewoDgtrd/trylock.go b/pkg/strategy/ewoDgtrd/trylock.go new file mode 100644 index 0000000000..f3e6e551a4 --- /dev/null +++ b/pkg/strategy/ewoDgtrd/trylock.go @@ -0,0 +1,16 @@ +//go:build !go1.18 +// +build !go1.18 + +package ewoDgtrd + +import "sync" + +func tryLock(lock *sync.RWMutex) bool { + lock.Lock() + return true +} + +func tryRLock(lock *sync.RWMutex) bool { + lock.RLock() + return true +} diff --git a/pkg/strategy/ewoDgtrd/trylock_18.go b/pkg/strategy/ewoDgtrd/trylock_18.go new file mode 100644 index 0000000000..1511766ae3 --- /dev/null +++ b/pkg/strategy/ewoDgtrd/trylock_18.go @@ -0,0 +1,14 @@ +//go:build go1.18 +// +build go1.18 + +package ewoDgtrd + +import "sync" + +func tryLock(lock *sync.RWMutex) bool { + return lock.TryLock() +} + +func tryRLock(lock *sync.RWMutex) bool { + return lock.TryRLock() +} diff --git a/pkg/strategy/factorzoo/correlation.go b/pkg/strategy/factorzoo/correlation.go new file mode 100644 index 0000000000..6e666d8fa6 --- /dev/null +++ b/pkg/strategy/factorzoo/correlation.go @@ -0,0 +1,103 @@ +package factorzoo + +import ( + "fmt" + "math" + "time" + + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +var zeroTime time.Time + +type KLineValueMapper 
func(k types.KLine) float64 + +//go:generate callbackgen -type Correlation +type Correlation struct { + types.IntervalWindow + Values types.Float64Slice + EndTime time.Time + + UpdateCallbacks []func(value float64) +} + +func (inc *Correlation) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *Correlation) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + correlation, err := calculateCORRELATION(recentT, inc.Window, KLineAmplitudeMapper, indicator.KLineVolumeMapper) + if err != nil { + log.WithError(err).Error("can not calculate correlation") + return + } + inc.Values.Push(correlation) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(correlation) +} + +func (inc *Correlation) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *Correlation) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateCORRELATION(klines []types.KLine, window int, valA KLineValueMapper, valB KLineValueMapper) (float64, error) { + length := len(klines) + if length == 0 || length < window { + return 0.0, fmt.Errorf("insufficient elements for calculating VOL with window = %d", window) + } + + sumA, sumB, sumAB, squareSumA, squareSumB := 0., 0., 0., 0., 0. + for _, k := range klines { + // sum of elements of array A + sumA += valA(k) + // sum of elements of array B + sumB += valB(k) + + // sum of A[i] * B[i]. + sumAB = sumAB + valA(k)*valB(k) + + // sum of square of array elements. + squareSumA = squareSumA + valA(k)*valA(k) + squareSumB = squareSumB + valB(k)*valB(k) + } + // use formula for calculating correlation coefficient. + corr := (float64(window)*sumAB - sumA*sumB) / + math.Sqrt((float64(window)*squareSumA-sumA*sumA)*(float64(window)*squareSumB-sumB*sumB)) + + return corr, nil +} + +func KLineAmplitudeMapper(k types.KLine) float64 { + return k.High.Div(k.Low).Float64() +} diff --git a/pkg/strategy/factorzoo/correlation_callbacks.go b/pkg/strategy/factorzoo/correlation_callbacks.go new file mode 100644 index 0000000000..2ef6323eae --- /dev/null +++ b/pkg/strategy/factorzoo/correlation_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type Correlation"; DO NOT EDIT. 
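// Sketch (hypothetical, not part of the patch): a standalone version of the
// Pearson coefficient computed by calculateCORRELATION in correlation.go
// above, over two equal-length float slices:
//   corr = (n*Σab − Σa·Σb) / sqrt((n*Σa² − (Σa)²) * (n*Σb² − (Σb)²))
// Assumes non-degenerate inputs (len > 1, non-constant series).
func pearsonSketch(a, b []float64) float64 {
	n := float64(len(a))
	var sumA, sumB, sumAB, sqA, sqB float64
	for i := range a {
		sumA += a[i]
		sumB += b[i]
		sumAB += a[i] * b[i]
		sqA += a[i] * a[i]
		sqB += b[i] * b[i]
	}
	return (n*sumAB - sumA*sumB) / math.Sqrt((n*sqA-sumA*sumA)*(n*sqB-sumB*sumB))
}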
+ +package factorzoo + +import () + +func (inc *Correlation) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *Correlation) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/strategy/factorzoo/strategy.go b/pkg/strategy/factorzoo/strategy.go new file mode 100644 index 0000000000..5ed9d7a847 --- /dev/null +++ b/pkg/strategy/factorzoo/strategy.go @@ -0,0 +1,279 @@ +package factorzoo + +import ( + "context" + "fmt" + + "github.com/sajari/regression" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "factorzoo" + +var three = fixedpoint.NewFromInt(3) + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type IntervalWindowSetting struct { + types.IntervalWindow +} + +type Strategy struct { + Symbol string `json:"symbol"` + Market types.Market + Interval types.Interval `json:"interval"` + Quantity fixedpoint.Value `json:"quantity"` + + Position *types.Position `json:"position,omitempty"` + + activeMakerOrders *bbgo.ActiveOrderBook + orderStore *bbgo.OrderStore + tradeCollector *bbgo.TradeCollector + + session *bbgo.ExchangeSession + book *types.StreamOrderBook + + prevClose fixedpoint.Value + + pvDivergenceSetting *IntervalWindowSetting `json:"pvDivergence"` + pvDivergence *Correlation + + Ret []float64 + Alpha [][]float64 + + T int64 + prevER fixedpoint.Value +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + log.Infof("subscribe %s", s.Symbol) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) +} + +func (s *Strategy) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error { + base := s.Position.GetBase() + if base.IsZero() { + return fmt.Errorf("no opened %s position", s.Position.Symbol) + } + + // make it negative + quantity := base.Mul(percentage).Abs() + side := types.SideTypeBuy + if base.Sign() > 0 { + side = types.SideTypeSell + } + + if quantity.Compare(s.Market.MinQuantity) < 0 { + return fmt.Errorf("order quantity %v is too small, less than %v", quantity, s.Market.MinQuantity) + } + + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: side, + Type: types.OrderTypeMarket, + Quantity: quantity, + Market: s.Market, + } + + // s.Notify("Submitting %s %s order to close position by %v", s.Symbol, side.String(), percentage, submitOrder) + + createdOrders, err := s.session.Exchange.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place position close order") + } + + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + return err +} + +func (s *Strategy) placeOrders(ctx context.Context, orderExecutor bbgo.OrderExecutor, er fixedpoint.Value) { + + // if s.prevER.Sign() < 0 && er.Sign() > 0 { + if er.Sign() >= 0 { + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeMarket, + Quantity: s.Quantity, // er.Abs().Mul(fixedpoint.NewFromInt(20)), + } + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place orders") + } + s.orderStore.Add(createdOrders...) 
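+ // descriptive note (as read from the code): the created orders are kept in the local
+ // order store so the trade collector can match their fills, and in the active order
+ // book so they can be gracefully cancelled on the next k-line close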
+ s.activeMakerOrders.Add(createdOrders...) + // } else if s.prevER.Sign() > 0 && er.Sign() < 0 { + } else { + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeMarket, + Quantity: s.Quantity, // er.Abs().Mul(fixedpoint.NewFromInt(20)), + } + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place orders") + } + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + } + s.prevER = er +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + // initial required information + s.session = session + s.prevClose = fixedpoint.Zero + + // first we need to get market data store(cached market data) from the exchange session + st, _ := session.MarketDataStore(s.Symbol) + // setup the time frame size + iw := types.IntervalWindow{Window: 50, Interval: s.Interval} + // construct CORR indicator + s.pvDivergence = &Correlation{IntervalWindow: iw} + // bind indicator to the data store, so that our callback could be triggered + s.pvDivergence.Bind(st) + // s.pvDivergence.OnUpdate(func(corr float64) { + // //fmt.Printf("now we've got corr: %f\n", corr) + // }) + windowSize := 360 / s.Interval.Minutes() + if windowSize == 0 { + windowSize = 3 + } + drift := &indicator.Drift{IntervalWindow: types.IntervalWindow{Window: windowSize, Interval: s.Interval}} + drift.Bind(st) + + s.Alpha = [][]float64{{}, {}, {}, {}, {}, {}} + s.Ret = []float64{} + // thetas := []float64{0, 0, 0, 0} + preCompute := 0 + + s.activeMakerOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeMakerOrders.BindStream(session.UserDataStream) + + s.orderStore = bbgo.NewOrderStore(s.Symbol) + s.orderStore.BindStream(session.UserDataStream) + + if s.Position == nil { + s.Position = types.NewPositionFromMarket(s.Market) + } + + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.Position, s.orderStore) + s.tradeCollector.BindStream(session.UserDataStream) + + session.UserDataStream.OnStart(func() { + log.Infof("connected") + }) + + s.T = 20 + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + + if kline.Symbol != s.Symbol || kline.Interval != s.Interval { + return + } + + if err := s.activeMakerOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + // amplitude volume divergence + corr := fixedpoint.NewFromFloat(s.pvDivergence.Last()).Neg() + // price mean reversion + rev := fixedpoint.NewFromInt(1).Div(kline.Close) + // alpha150 from GTJA's 191 paper + a150 := kline.High.Add(kline.Low).Add(kline.Close).Div(three).Mul(kline.Volume) + // momentum from WQ's 101 paper + mom := fixedpoint.One.Sub(kline.Open.Div(kline.Close)).Mul(fixedpoint.NegOne) + // opening gap + ogap := kline.Open.Div(s.prevClose) + + driftVal := drift.Last() + + log.Infof("corr: %f, rev: %f, a150: %f, mom: %f, ogap: %f", corr.Float64(), rev.Float64(), a150.Float64(), mom.Float64(), ogap.Float64()) + s.Alpha[0] = append(s.Alpha[0], corr.Float64()) + s.Alpha[1] = append(s.Alpha[1], rev.Float64()) + s.Alpha[2] = append(s.Alpha[2], a150.Float64()) + s.Alpha[3] = append(s.Alpha[3], mom.Float64()) + s.Alpha[4] = append(s.Alpha[4], ogap.Float64()) + s.Alpha[5] = append(s.Alpha[5], driftVal) + + // s.Alpha[5] = append(s.Alpha[4], 1.0) // constant + + ret := kline.Close.Sub(s.prevClose).Div(s.prevClose).Float64() + s.Ret = append(s.Ret, ret) + log.Infof("Current Return: %f", 
s.Ret[len(s.Ret)-1]) + + // accumulate enough data for cross-sectional regression, not time-series regression + if preCompute < int(s.T)+1 { + preCompute++ + } else { + s.ClosePosition(ctx, fixedpoint.One) + s.tradeCollector.Process() + // rolling regression for last 20 interval alphas + r := new(regression.Regression) + r.SetObserved("Return Rate Per Timeframe") + r.SetVar(0, "Corr") + r.SetVar(1, "Rev") + r.SetVar(2, "A150") + r.SetVar(3, "Mom") + r.SetVar(4, "OGap") + r.SetVar(5, "Drift") + var rdp regression.DataPoints + for i := 1; i <= int(s.T); i++ { + // alphas[t-1], previous alphas, dot not take current alpha into account, will cause look-ahead bias + as := []float64{ + s.Alpha[0][len(s.Alpha[0])-(i+2)], + s.Alpha[1][len(s.Alpha[1])-(i+2)], + s.Alpha[2][len(s.Alpha[2])-(i+2)], + s.Alpha[3][len(s.Alpha[3])-(i+2)], + s.Alpha[4][len(s.Alpha[4])-(i+2)], + s.Alpha[5][len(s.Alpha[5])-(i+2)], + } + // alphas[t], current return rate + rt := s.Ret[len(s.Ret)-(i+1)] + rdp = append(rdp, regression.DataPoint(rt, as)) + + } + r.Train(rdp...) + r.Run() + fmt.Printf("Regression formula:\n%v\n", r.Formula) + // prediction := r.Coeff(0)*corr.Float64() + r.Coeff(1)*rev.Float64() + r.Coeff(2)*factorzoo.Float64() + r.Coeff(3)*mom.Float64() + r.Coeff(4) + prediction, _ := r.Predict([]float64{ + corr.Float64(), + rev.Float64(), + a150.Float64(), + mom.Float64(), + ogap.Float64(), + driftVal, + }) + log.Infof("Predicted Return: %f", prediction) + + s.placeOrders(ctx, orderExecutor, fixedpoint.NewFromFloat(prediction)) + s.tradeCollector.Process() + } + + s.prevClose = kline.Close + + }) + + return nil +} diff --git a/pkg/strategy/flashcrash/strategy.go b/pkg/strategy/flashcrash/strategy.go index 0ea031910c..b15fcbfb70 100644 --- a/pkg/strategy/flashcrash/strategy.go +++ b/pkg/strategy/flashcrash/strategy.go @@ -9,12 +9,15 @@ import ( log "github.com/sirupsen/logrus" "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/indicator" "github.com/c9s/bbgo/pkg/types" ) +const ID = "flashcrash" + func init() { - bbgo.RegisterStrategy("flashcrash", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) } type Strategy struct { @@ -28,13 +31,13 @@ type Strategy struct { // GridNum is the grid number, how many orders you want to places GridNum int `json:"gridNumber"` - Percentage float64 `json:"percentage"` + Percentage fixedpoint.Value `json:"percentage"` // BaseQuantity is the quantity you want to submit for each order. - BaseQuantity float64 `json:"baseQuantity"` + BaseQuantity fixedpoint.Value `json:"baseQuantity"` // activeOrders is the locally maintained active order book of the maker orders. - activeOrders *bbgo.LocalActiveOrderBook + activeOrders *bbgo.ActiveOrderBook // Injection fields start // -------------------------- @@ -54,8 +57,12 @@ type Strategy struct { ewma *indicator.EWMA } +func (s *Strategy) ID() string { + return ID +} + func (s *Strategy) updateOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { - if err := session.Exchange.CancelOrders(context.Background(), s.activeOrders.Bids.Orders()...); err != nil { + if err := s.activeOrders.GracefulCancel(context.Background(), session.Exchange); err != nil { log.WithError(err).Errorf("cancel order error") } @@ -64,15 +71,15 @@ func (s *Strategy) updateOrders(orderExecutor bbgo.OrderExecutor, session *bbgo. 
func (s *Strategy) updateBidOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { quoteCurrency := s.Market.QuoteCurrency - balances := session.Account.Balances() + balances := session.GetAccount().Balances() balance, ok := balances[quoteCurrency] - if !ok || balance.Available <= 0 { - log.Infof("insufficient balance of %s: %f", quoteCurrency, balance.Available.Float64()) + if !ok || balance.Available.Sign() <= 0 { + log.Infof("insufficient balance of %s: %v", quoteCurrency, balance.Available) return } - var startPrice = s.ewma.Last() * s.Percentage + var startPrice = fixedpoint.NewFromFloat(s.ewma.Last()).Mul(s.Percentage) var submitOrders []types.SubmitOrder for i := 0; i < s.GridNum; i++ { @@ -83,10 +90,10 @@ func (s *Strategy) updateBidOrders(orderExecutor bbgo.OrderExecutor, session *bb Market: s.Market, Quantity: s.BaseQuantity, Price: startPrice, - TimeInForce: "GTC", + TimeInForce: types.TimeInForceGTC, }) - startPrice *= s.Percentage + startPrice = startPrice.Mul(s.Percentage) } orders, err := orderExecutor.SubmitOrders(context.Background(), submitOrders...) @@ -99,20 +106,20 @@ func (s *Strategy) updateBidOrders(orderExecutor bbgo.OrderExecutor, session *bb } func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { - session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: string(s.Interval)}) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) } func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { // we don't persist orders so that we can not clear the previous orders for now. just need time to support this. - s.activeOrders = bbgo.NewLocalActiveOrderBook() - s.activeOrders.BindStream(session.Stream) + s.activeOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeOrders.BindStream(session.UserDataStream) s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { defer wg.Done() log.Infof("canceling active orders...") - if err := session.Exchange.CancelOrders(ctx, s.activeOrders.Orders()...); err != nil { + if err := orderExecutor.CancelOrders(ctx, s.activeOrders.Orders()...); err != nil { log.WithError(err).Errorf("cancel order error") } }) @@ -122,11 +129,13 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se Window: 25, }) - session.Stream.OnKLineClosed(func(kline types.KLine) { + session.UserDataStream.OnStart(func() { + s.updateOrders(orderExecutor, session) + }) + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { s.updateOrders(orderExecutor, session) }) - // TODO: move this to the stream onConnect handler - s.updateOrders(orderExecutor, session) return nil } diff --git a/pkg/strategy/fmaker/A18.go b/pkg/strategy/fmaker/A18.go new file mode 100644 index 0000000000..e0c456a21b --- /dev/null +++ b/pkg/strategy/fmaker/A18.go @@ -0,0 +1,90 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type A18 +type A18 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *A18) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *A18) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && 
lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateA18(recentT, indicator.KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *A18) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *A18) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +// CLOSE/DELAY(CLOSE,5) +func calculateA18(klines []types.KLine, valClose KLineValueMapper) (float64, error) { + window := 5 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var closes types.Float64Slice + + for _, k := range klines { + closes.Push(valClose(k)) + } + + delay5 := closes.Index(4) + curr := closes.Index(0) + alpha := curr / delay5 + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/A2.go b/pkg/strategy/fmaker/A2.go new file mode 100644 index 0000000000..8f72392598 --- /dev/null +++ b/pkg/strategy/fmaker/A2.go @@ -0,0 +1,102 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type A2 +type A2 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *A2) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *A2) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateA2(recentT, KLineLowPriceMapper, KLineHighPriceMapper, indicator.KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *A2) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *A2) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +// (-1 * DELTA((((CLOSE - LOW) - (HIGH - CLOSE)) / (HIGH - LOW)), 1)) +func calculateA2(klines []types.KLine, valLow KLineValueMapper, valHigh KLineValueMapper, valClose KLineValueMapper) (float64, error) { + window := 2 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var lows types.Float64Slice + var highs types.Float64Slice + var closes types.Float64Slice + + for _, k := range klines { + lows.Push(valLow(k)) + highs.Push(valHigh(k)) + closes.Push(valClose(k)) + } + + prev := ((closes.Index(1) - lows.Index(1)) - (highs.Index(1) - 
closes.Index(1))) / (highs.Index(1) - lows.Index(1)) + curr := ((closes.Index(0) - lows.Index(0)) - (highs.Index(0) - closes.Index(0))) / (highs.Index(0) - lows.Index(0)) + alpha := (curr - prev) * -1 // delta(1 interval) + + return alpha, nil +} + +func KLineLowPriceMapper(k types.KLine) float64 { + return k.Low.Float64() +} + +func KLineHighPriceMapper(k types.KLine) float64 { + return k.High.Float64() +} diff --git a/pkg/strategy/fmaker/A3.go b/pkg/strategy/fmaker/A3.go new file mode 100644 index 0000000000..35e0cc3b8c --- /dev/null +++ b/pkg/strategy/fmaker/A3.go @@ -0,0 +1,108 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "math" + "time" +) + +//go:generate callbackgen -type A3 +type A3 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *A3) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *A3) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateA3(recentT, KLineLowPriceMapper, KLineHighPriceMapper, indicator.KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate pivots") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *A3) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *A3) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +// SUM((CLOSE = DELAY(CLOSE, 1)?0:CLOSE-(CLOSE>DELAY(CLOSE, 1)?MIN(LOW, DELAY(CLOSE, 1)):MAX(HIGH, DELAY(CLOSE, 1)))), 6) +func calculateA3(klines []types.KLine, valLow KLineValueMapper, valHigh KLineValueMapper, valClose KLineValueMapper) (float64, error) { + window := 6 + 2 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var lows types.Float64Slice + var highs types.Float64Slice + var closes types.Float64Slice + + for _, k := range klines { + lows.Push(valLow(k)) + highs.Push(valHigh(k)) + closes.Push(valClose(k)) + } + + a := 0. + sumA := 0. + for i := 1; i <= 6; i++ { + if closes.Index(len(closes)-i) == closes.Index(len(closes)-i-1) { + a = 0. 
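+ // flat close: the CLOSE == DELAY(CLOSE, 1) branch of the formula above contributes 0 to the 6-bar sum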
+ } else { + if closes.Index(len(closes)-i) > closes.Index(1) { + a = closes.Index(len(closes)-i) - math.Min(lows.Index(len(lows)-i), closes.Index(len(closes)-i-1)) + } else { + a = closes.Index(len(closes)-i) - math.Max(highs.Index(len(highs)-i), closes.Index(len(closes)-i-1)) + } + } + sumA += a + } + + alpha := sumA // sum(a, 6 interval) + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/A34.go b/pkg/strategy/fmaker/A34.go new file mode 100644 index 0000000000..5062c13171 --- /dev/null +++ b/pkg/strategy/fmaker/A34.go @@ -0,0 +1,96 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type A34 +type A34 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *A34) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *A34) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateA34(recentT, indicator.KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate pivots") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *A34) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *A34) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateA34(klines []types.KLine, valClose KLineValueMapper) (float64, error) { + window := 12 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var closes types.Float64Slice + + for _, k := range klines { + closes.Push(valClose(k)) + } + + c := closes.Last() + + sumC := 0. 
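+ // as read from the code below: A34 = MEAN(CLOSE, 12) / CLOSE, i.e. the average of the
+ // last 12 closes divided by the latest close; the loop accumulates those 12 closes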
+ for i := 1; i <= 12; i++ { + sumC += closes.Index(len(closes) - i) + } + + meanC := sumC / 12 + + alpha := meanC / c + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/R.go b/pkg/strategy/fmaker/R.go new file mode 100644 index 0000000000..278dfdcaa6 --- /dev/null +++ b/pkg/strategy/fmaker/R.go @@ -0,0 +1,93 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +var zeroTime time.Time + +type KLineValueMapper func(k types.KLine) float64 + +//go:generate callbackgen -type R +type R struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *R) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *R) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateR(recentT, indicator.KLineOpenPriceMapper, indicator.KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate pivots") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *R) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *R) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateR(klines []types.KLine, valOpen KLineValueMapper, valClose KLineValueMapper) (float64, error) { + window := 1 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var opens types.Float64Slice + var closes types.Float64Slice + + for _, k := range klines { + opens.Push(valOpen(k)) + closes.Push(valClose(k)) + } + + ret := opens.Index(0)/closes.Index(0) - 1 // delta(1 interval) + + return ret, nil +} diff --git a/pkg/strategy/fmaker/S0.go b/pkg/strategy/fmaker/S0.go new file mode 100644 index 0000000000..78ea60522a --- /dev/null +++ b/pkg/strategy/fmaker/S0.go @@ -0,0 +1,88 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type S0 +type S0 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *S0) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *S0) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateS0(recentT, indicator.KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = 
inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *S0) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *S0) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateS0(klines []types.KLine, valClose KLineValueMapper) (float64, error) { + window := 20 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var closes types.Float64Slice + + for _, k := range klines { + closes.Push(valClose(k)) + } + + sma := types.Float64Slice.Sum(closes[len(closes)-window:len(closes)-1]) / float64(window) + alpha := sma / closes.Last() + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/S1.go b/pkg/strategy/fmaker/S1.go new file mode 100644 index 0000000000..85fdac13d0 --- /dev/null +++ b/pkg/strategy/fmaker/S1.go @@ -0,0 +1,99 @@ +package fmaker + +import ( + "fmt" + "math" + "time" + + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +//go:generate callbackgen -type S1 +type S1 struct { + types.IntervalWindow + Values types.Float64Slice + EndTime time.Time + + UpdateCallbacks []func(value float64) +} + +func (inc *S1) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *S1) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + correlation, err := calculateS1(recentT, inc.Window, KLineAmplitudeMapper, indicator.KLineVolumeMapper) + if err != nil { + log.WithError(err).Error("can not calculate correlation") + return + } + inc.Values.Push(correlation) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(correlation) +} + +func (inc *S1) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *S1) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateS1(klines []types.KLine, window int, valA KLineValueMapper, valB KLineValueMapper) (float64, error) { + length := len(klines) + if length == 0 || length < window { + return 0.0, fmt.Errorf("insufficient elements for calculating VOL with window = %d", window) + } + + sumA, sumB, sumAB, squareSumA, squareSumB := 0., 0., 0., 0., 0. + for _, k := range klines { + // sum of elements of array A + sumA += valA(k) + // sum of elements of array B + sumB += valB(k) + + // sum of A[i] * B[i]. + sumAB = sumAB + valA(k)*valB(k) + + // sum of square of array elements. + squareSumA = squareSumA + valA(k)*valA(k) + squareSumB = squareSumB + valB(k)*valB(k) + } + // use formula for calculating correlation coefficient. 
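+ // Pearson correlation over the window (n = window):
+ //   corr = (n*sum(ab) - sum(a)*sum(b)) / sqrt((n*sum(a^2) - sum(a)^2) * (n*sum(b^2) - sum(b)^2))
+ // S1 negates the result on return, so stronger amplitude/volume co-movement yields a lower factor value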
+ corr := (float64(window)*sumAB - sumA*sumB) / + math.Sqrt((float64(window)*squareSumA-sumA*sumA)*(float64(window)*squareSumB-sumB*sumB)) + + return -corr, nil +} + +func KLineAmplitudeMapper(k types.KLine) float64 { + return k.High.Div(k.Low).Float64() +} diff --git a/pkg/strategy/fmaker/S2.go b/pkg/strategy/fmaker/S2.go new file mode 100644 index 0000000000..b52f49c111 --- /dev/null +++ b/pkg/strategy/fmaker/S2.go @@ -0,0 +1,95 @@ +package fmaker + +import ( + "fmt" + "math" + "time" + + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +//go:generate callbackgen -type S2 +type S2 struct { + types.IntervalWindow + Values types.Float64Slice + EndTime time.Time + + UpdateCallbacks []func(value float64) +} + +func (inc *S2) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *S2) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + correlation, err := calculateS2(recentT, inc.Window, indicator.KLineOpenPriceMapper, indicator.KLineVolumeMapper) + if err != nil { + log.WithError(err).Error("can not calculate correlation") + return + } + inc.Values.Push(correlation) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(correlation) +} + +func (inc *S2) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *S2) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateS2(klines []types.KLine, window int, valA KLineValueMapper, valB KLineValueMapper) (float64, error) { + length := len(klines) + if length == 0 || length < window { + return 0.0, fmt.Errorf("insufficient elements for calculating VOL with window = %d", window) + } + + sumA, sumB, sumAB, squareSumA, squareSumB := 0., 0., 0., 0., 0. + for _, k := range klines { + // sum of elements of array A + sumA += valA(k) + // sum of elements of array B + sumB += valB(k) + + // sum of A[i] * B[i]. + sumAB = sumAB + valA(k)*valB(k) + + // sum of square of array elements. + squareSumA = squareSumA + valA(k)*valA(k) + squareSumB = squareSumB + valB(k)*valB(k) + } + // use formula for calculating correlation coefficient. 
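+ // same Pearson correlation formula as in S1, here between open price and volume; the
+ // sign is flipped on return, so stronger open/volume co-movement lowers the factor value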
+ corr := (float64(window)*sumAB - sumA*sumB) / + math.Sqrt((float64(window)*squareSumA-sumA*sumA)*(float64(window)*squareSumB-sumB*sumB)) + + return -corr, nil +} diff --git a/pkg/strategy/fmaker/S3.go b/pkg/strategy/fmaker/S3.go new file mode 100644 index 0000000000..bd585d48c0 --- /dev/null +++ b/pkg/strategy/fmaker/S3.go @@ -0,0 +1,91 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type S3 +type S3 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *S3) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *S3) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateS3(recentT, indicator.KLineClosePriceMapper, indicator.KLineOpenPriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *S3) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *S3) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateS3(klines []types.KLine, valClose KLineValueMapper, valOpen KLineValueMapper) (float64, error) { + window := 2 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var closes types.Float64Slice + var opens types.Float64Slice + + for _, k := range klines { + closes.Push(valClose(k)) + opens.Push(valOpen(k)) + } + + prevC := closes.Index(1) + currO := opens.Index(0) + alpha := currO / prevC + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/S4.go b/pkg/strategy/fmaker/S4.go new file mode 100644 index 0000000000..5e204dc0d8 --- /dev/null +++ b/pkg/strategy/fmaker/S4.go @@ -0,0 +1,88 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type S4 +type S4 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *S4) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *S4) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateS4(recentT, indicator.KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = 
klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *S4) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *S4) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateS4(klines []types.KLine, valClose KLineValueMapper) (float64, error) { + window := 2 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var closes types.Float64Slice + + for _, k := range klines { + closes.Push(valClose(k)) + } + + currC := closes.Index(0) + alpha := 1 / currC + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/S5.go b/pkg/strategy/fmaker/S5.go new file mode 100644 index 0000000000..0cc4c54b8d --- /dev/null +++ b/pkg/strategy/fmaker/S5.go @@ -0,0 +1,96 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type S5 +type S5 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *S5) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *S5) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateS5(recentT, indicator.KLineVolumeMapper) + if err != nil { + log.WithError(err).Error("can not calculate pivots") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *S5) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *S5) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateS5(klines []types.KLine, valVolume KLineValueMapper) (float64, error) { + window := 10 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var volumes types.Float64Slice + + for _, k := range klines { + volumes.Push(valVolume(k)) + } + + v := volumes.Last() + + sumV := 0. 
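+ // as read from the code below: S5 = -V / MEAN(V, 10), the latest volume relative to
+ // its 10-bar average, negated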
+ for i := 1; i <= 10; i++ { + sumV += volumes.Index(len(volumes) - i) + } + + meanV := sumV / 10 + + alpha := -v / meanV + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/S6.go b/pkg/strategy/fmaker/S6.go new file mode 100644 index 0000000000..e4db9e4f2a --- /dev/null +++ b/pkg/strategy/fmaker/S6.go @@ -0,0 +1,98 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type S6 +type S6 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *S6) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *S6) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateS6(recentT, indicator.KLineHighPriceMapper, indicator.KLineLowPriceMapper, indicator.KLineClosePriceMapper, indicator.KLineVolumeMapper) + if err != nil { + log.WithError(err).Error("can not calculate") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *S6) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *S6) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateS6(klines []types.KLine, valHigh KLineValueMapper, valLow KLineValueMapper, valClose KLineValueMapper, valVolume KLineValueMapper) (float64, error) { + window := 2 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var highs types.Float64Slice + var lows types.Float64Slice + var closes types.Float64Slice + var volumes types.Float64Slice + + for _, k := range klines { + highs.Push(valHigh(k)) + lows.Push(valLow(k)) + closes.Push(valClose(k)) + volumes.Push(valVolume(k)) + + } + + H := highs.Last() + L := lows.Last() + C := closes.Last() + V := volumes.Last() + alpha := (H + L + C) / 3 * V + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/S7.go b/pkg/strategy/fmaker/S7.go new file mode 100644 index 0000000000..d5f0b5f705 --- /dev/null +++ b/pkg/strategy/fmaker/S7.go @@ -0,0 +1,92 @@ +package fmaker + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "time" +) + +//go:generate callbackgen -type S7 +type S7 struct { + types.IntervalWindow + + // Values + Values types.Float64Slice + + EndTime time.Time + + UpdateCallbacks []func(val float64) +} + +func (inc *S7) Last() float64 { + if len(inc.Values) == 0 { + return 0.0 + } + return inc.Values[len(inc.Values)-1] +} + +func (inc *S7) calculateAndUpdate(klines []types.KLine) { + if len(klines) < inc.Window { + return + } + + var end = len(klines) - 1 + var lastKLine = klines[end] + + if inc.EndTime != zeroTime && lastKLine.GetEndTime().Before(inc.EndTime) { + return + } + + var recentT = klines[end-(inc.Window-1) : end+1] + + val, err := calculateS7(recentT, 
indicator.KLineOpenPriceMapper, indicator.KLineClosePriceMapper) + if err != nil { + log.WithError(err).Error("can not calculate") + return + } + inc.Values.Push(val) + + if len(inc.Values) > indicator.MaxNumOfVOL { + inc.Values = inc.Values[indicator.MaxNumOfVOLTruncateSize-1:] + } + + inc.EndTime = klines[end].GetEndTime().Time() + + inc.EmitUpdate(val) + +} + +func (inc *S7) handleKLineWindowUpdate(interval types.Interval, window types.KLineWindow) { + if inc.Interval != interval { + return + } + + inc.calculateAndUpdate(window) +} + +func (inc *S7) Bind(updater indicator.KLineWindowUpdater) { + updater.OnKLineWindowUpdate(inc.handleKLineWindowUpdate) +} + +func calculateS7(klines []types.KLine, valOpen KLineValueMapper, valClose KLineValueMapper) (float64, error) { + window := 2 + length := len(klines) + if length == 0 || length < window { + return 0., fmt.Errorf("insufficient elements for calculating with window = %d", window) + } + var opens types.Float64Slice + var closes types.Float64Slice + + for _, k := range klines { + opens.Push(valOpen(k)) + closes.Push(valClose(k)) + + } + + O := opens.Last() + C := closes.Last() + alpha := -(1 - O/C) + + return alpha, nil +} diff --git a/pkg/strategy/fmaker/a18_callbacks.go b/pkg/strategy/fmaker/a18_callbacks.go new file mode 100644 index 0000000000..c6bd0c45e2 --- /dev/null +++ b/pkg/strategy/fmaker/a18_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type A18"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *A18) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *A18) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/a2_callbacks.go b/pkg/strategy/fmaker/a2_callbacks.go new file mode 100644 index 0000000000..d1fdf00f34 --- /dev/null +++ b/pkg/strategy/fmaker/a2_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type A2"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *A2) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *A2) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/a34_callbacks.go b/pkg/strategy/fmaker/a34_callbacks.go new file mode 100644 index 0000000000..fb128efadb --- /dev/null +++ b/pkg/strategy/fmaker/a34_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type A34"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *A34) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *A34) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/a3_callbacks.go b/pkg/strategy/fmaker/a3_callbacks.go new file mode 100644 index 0000000000..ad83cd8be8 --- /dev/null +++ b/pkg/strategy/fmaker/a3_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type A3"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *A3) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *A3) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/r_callbacks.go b/pkg/strategy/fmaker/r_callbacks.go new file mode 100644 index 0000000000..afc55e417e --- /dev/null +++ b/pkg/strategy/fmaker/r_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type R"; DO NOT EDIT. 
+ +package fmaker + +import () + +func (inc *R) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *R) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/s0_callbacks.go b/pkg/strategy/fmaker/s0_callbacks.go new file mode 100644 index 0000000000..1d384c83b0 --- /dev/null +++ b/pkg/strategy/fmaker/s0_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type S0"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *S0) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *S0) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/s1_callbacks.go b/pkg/strategy/fmaker/s1_callbacks.go new file mode 100644 index 0000000000..5d7eb0119b --- /dev/null +++ b/pkg/strategy/fmaker/s1_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type S1"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *S1) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *S1) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/strategy/fmaker/s2_callbacks.go b/pkg/strategy/fmaker/s2_callbacks.go new file mode 100644 index 0000000000..c65a7af719 --- /dev/null +++ b/pkg/strategy/fmaker/s2_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type S2"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *S2) OnUpdate(cb func(value float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *S2) EmitUpdate(value float64) { + for _, cb := range inc.UpdateCallbacks { + cb(value) + } +} diff --git a/pkg/strategy/fmaker/s3_callbacks.go b/pkg/strategy/fmaker/s3_callbacks.go new file mode 100644 index 0000000000..01a6ea01e1 --- /dev/null +++ b/pkg/strategy/fmaker/s3_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type S3"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *S3) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *S3) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/s4_callbacks.go b/pkg/strategy/fmaker/s4_callbacks.go new file mode 100644 index 0000000000..0d00584403 --- /dev/null +++ b/pkg/strategy/fmaker/s4_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type S4"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *S4) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *S4) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/s5_callbacks.go b/pkg/strategy/fmaker/s5_callbacks.go new file mode 100644 index 0000000000..65f7f9a8f4 --- /dev/null +++ b/pkg/strategy/fmaker/s5_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type S5"; DO NOT EDIT. 
+ +package fmaker + +import () + +func (inc *S5) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *S5) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/s6_callbacks.go b/pkg/strategy/fmaker/s6_callbacks.go new file mode 100644 index 0000000000..33daec76e5 --- /dev/null +++ b/pkg/strategy/fmaker/s6_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type S6"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *S6) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *S6) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/s7_callbacks.go b/pkg/strategy/fmaker/s7_callbacks.go new file mode 100644 index 0000000000..fec9457d74 --- /dev/null +++ b/pkg/strategy/fmaker/s7_callbacks.go @@ -0,0 +1,15 @@ +// Code generated by "callbackgen -type S7"; DO NOT EDIT. + +package fmaker + +import () + +func (inc *S7) OnUpdate(cb func(val float64)) { + inc.UpdateCallbacks = append(inc.UpdateCallbacks, cb) +} + +func (inc *S7) EmitUpdate(val float64) { + for _, cb := range inc.UpdateCallbacks { + cb(val) + } +} diff --git a/pkg/strategy/fmaker/strategy.go b/pkg/strategy/fmaker/strategy.go new file mode 100644 index 0000000000..d67367569b --- /dev/null +++ b/pkg/strategy/fmaker/strategy.go @@ -0,0 +1,534 @@ +package fmaker + +import ( + "context" + "fmt" + "math" + + "github.com/sajari/regression" + "github.com/sirupsen/logrus" + "gonum.org/v1/gonum/floats" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "fmaker" + +var fifteen = fixedpoint.NewFromInt(15) +var three = fixedpoint.NewFromInt(3) +var two = fixedpoint.NewFromInt(2) + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type IntervalWindowSetting struct { + types.IntervalWindow +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + + Environment *bbgo.Environment + Symbol string `json:"symbol"` + Market types.Market + Interval types.Interval `json:"interval"` + Quantity fixedpoint.Value `json:"quantity"` + + // persistence fields + Position *types.Position `json:"position,omitempty" persistence:"position"` + ProfitStats *types.ProfitStats `json:"profitStats,omitempty" persistence:"profit_stats"` + + Spread fixedpoint.Value `json:"spread" persistence:"spread"` + + activeMakerOrders *bbgo.ActiveOrderBook + // closePositionOrders *bbgo.LocalActiveOrderBook + + orderStore *bbgo.OrderStore + tradeCollector *bbgo.TradeCollector + + session *bbgo.ExchangeSession + + bbgo.QuantityOrAmount + + S0 *S0 + S1 *S1 + S2 *S2 + S3 *S3 + S4 *S4 + S5 *S5 + S6 *S6 + S7 *S7 + + A2 *A2 + A3 *A3 + A18 *A18 + A34 *A34 + + R *R + + // StrategyController + bbgo.StrategyController +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + log.Infof("subscribe %s", s.Symbol) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: types.Interval15m}) + +} + +func (s *Strategy) placeOrder(ctx context.Context, price fixedpoint.Value, qty fixedpoint.Value, orderExecutor bbgo.OrderExecutor) { + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: 
types.OrderTypeLimit, + Price: price, + Quantity: qty, + } + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place orders") + } + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + // s.tradeCollector.Process() +} + +func (s *Strategy) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error { + base := s.Position.GetBase() + if base.IsZero() { + return fmt.Errorf("no opened %s position", s.Position.Symbol) + } + + // make it negative + quantity := base.Mul(percentage).Abs() + side := types.SideTypeBuy + if base.Sign() > 0 { + side = types.SideTypeSell + } + + if quantity.Compare(s.Market.MinQuantity) < 0 { + return fmt.Errorf("order quantity %v is too small, less than %v", quantity, s.Market.MinQuantity) + } + + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: side, + Type: types.OrderTypeMarket, + Quantity: quantity, + // Price: closePrice, + Market: s.Market, + } + + // s.Notify("Submitting %s %s order to close position by %v", s.Symbol, side.String(), percentage, submitOrder) + + createdOrders, err := s.session.Exchange.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place position close order") + } + + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + return err +} +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + // initial required information + s.session = session + // s.prevClose = fixedpoint.Zero + + // first we need to get market data store(cached market data) from the exchange session + // st, _ := session.MarketDataStore(s.Symbol) + + s.activeMakerOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeMakerOrders.BindStream(session.UserDataStream) + + // s.closePositionOrders = bbgo.NewLocalActiveOrderBook(s.Symbol) + // s.closePositionOrders.BindStream(session.UserDataStream) + + s.orderStore = bbgo.NewOrderStore(s.Symbol) + s.orderStore.BindStream(session.UserDataStream) + + if s.Position == nil { + s.Position = types.NewPositionFromMarket(s.Market) + } + + // calculate group id for orders + instanceID := s.InstanceID() + // s.groupID = util.FNV32(instanceID) + + // Always update the position fields + s.Position.Strategy = ID + s.Position.StrategyInstanceID = instanceID + + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.Position, s.orderStore) + s.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + // StrategyController + if s.Status != types.StrategyStatusRunning { + return + } + + bbgo.Notify(trade) + s.ProfitStats.AddTrade(trade) + + if profit.Compare(fixedpoint.Zero) == 0 { + s.Environment.RecordPosition(s.Position, trade, nil) + } else { + log.Infof("%s generated profit: %v", s.Symbol, profit) + p := s.Position.NewProfit(trade, profit, netProfit) + p.Strategy = ID + p.StrategyInstanceID = instanceID + bbgo.Notify(&p) + + s.ProfitStats.AddProfit(p) + bbgo.Notify(&s.ProfitStats) + + s.Environment.RecordPosition(s.Position, trade, &p) + } + }) + + s.tradeCollector.OnPositionUpdate(func(position *types.Position) { + log.Infof("position changed: %s", s.Position) + bbgo.Notify(s.Position) + }) + s.tradeCollector.BindStream(session.UserDataStream) + st, _ := session.MarketDataStore(s.Symbol) + + riw := types.IntervalWindow{Window: 1, Interval: s.Interval} + s.R = &R{IntervalWindow: 
riw} + s.R.Bind(st) + + s0iw := types.IntervalWindow{Window: 20, Interval: s.Interval} + s.S0 = &S0{IntervalWindow: s0iw} + s.S0.Bind(st) + + s1iw := types.IntervalWindow{Window: 20, Interval: s.Interval} + s.S1 = &S1{IntervalWindow: s1iw} + s.S1.Bind(st) + + s2iw := types.IntervalWindow{Window: 20, Interval: s.Interval} + s.S2 = &S2{IntervalWindow: s2iw} + s.S2.Bind(st) + + s3iw := types.IntervalWindow{Window: 2, Interval: s.Interval} + s.S3 = &S3{IntervalWindow: s3iw} + s.S3.Bind(st) + + s4iw := types.IntervalWindow{Window: 2, Interval: s.Interval} + s.S4 = &S4{IntervalWindow: s4iw} + s.S4.Bind(st) + + s5iw := types.IntervalWindow{Window: 10, Interval: s.Interval} + s.S5 = &S5{IntervalWindow: s5iw} + s.S5.Bind(st) + + s6iw := types.IntervalWindow{Window: 2, Interval: s.Interval} + s.S6 = &S6{IntervalWindow: s6iw} + s.S6.Bind(st) + + s7iw := types.IntervalWindow{Window: 2, Interval: s.Interval} + s.S7 = &S7{IntervalWindow: s7iw} + s.S7.Bind(st) + + a2iw := types.IntervalWindow{Window: 2, Interval: s.Interval} + s.A2 = &A2{IntervalWindow: a2iw} + s.A2.Bind(st) + + a3iw := types.IntervalWindow{Window: 8, Interval: s.Interval} + s.A3 = &A3{IntervalWindow: a3iw} + s.A3.Bind(st) + + a18iw := types.IntervalWindow{Window: 5, Interval: s.Interval} + s.A18 = &A18{IntervalWindow: a18iw} + s.A18.Bind(st) + + a34iw := types.IntervalWindow{Window: 12, Interval: s.Interval} + s.A34 = &A34{IntervalWindow: a34iw} + s.A34.Bind(st) + + session.UserDataStream.OnStart(func() { + log.Infof("connected") + }) + + outlook := 1 + + // futuresMode := s.session.Futures || s.session.IsolatedFutures + cnt := 0 + + // var prevEr float64 + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + + // if kline.Interval == types.Interval15m && kline.Symbol == s.Symbol && !s.Market.IsDustQuantity(s.Position.GetBase(), kline.Close) { + // if err := s.activeMakerOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + // log.WithError(err).Errorf("graceful cancel order error") + // } + // s.ClosePosition(ctx, fixedpoint.One) + // s.tradeCollector.Process() + // } + if kline.Symbol != s.Symbol || kline.Interval != s.Interval { + return + } + + if err := s.activeMakerOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + cnt += 1 + if cnt < 15+1+outlook { + return + } + + r := new(regression.Regression) + r.SetObserved("Return Rate Per Interval") + r.SetVar(0, "S0") + r.SetVar(1, "S1") + r.SetVar(2, "S2") + // r.SetVar(2, "S3") + r.SetVar(3, "S4") + r.SetVar(4, "S5") + r.SetVar(5, "S6") + r.SetVar(6, "S7") + r.SetVar(7, "A2") + r.SetVar(8, "A3") + r.SetVar(9, "A18") + r.SetVar(10, "A34") + + var rdps regression.DataPoints + + for i := 1; i <= 15; i++ { + s0 := s.S0.Values[len(s.S0.Values)-i-outlook] + s1 := s.S1.Values[len(s.S1.Values)-i-outlook] + s2 := s.S2.Values[len(s.S2.Values)-i-outlook] + // s3 := s.S3.Values[len(s.S3.Values)-i-1] + s4 := s.S4.Values[len(s.S4.Values)-i-outlook] + s5 := s.S5.Values[len(s.S5.Values)-i-outlook] + s6 := s.S6.Values[len(s.S6.Values)-i-outlook] + s7 := s.S7.Values[len(s.S7.Values)-i-outlook] + a2 := s.A2.Values[len(s.A2.Values)-i-outlook] + a3 := s.A3.Values[len(s.A3.Values)-i-outlook] + a18 := s.A18.Values[len(s.A18.Values)-i-outlook] + a34 := s.A34.Values[len(s.A34.Values)-i-outlook] + + ret := s.R.Values[len(s.R.Values)-i] + rdps = append(rdps, regression.DataPoint(ret, types.Float64Slice{s0, s1, s2, s4, s5, s6, s7, a2, a3, a18, a34})) + } + // for i := 40; i > 20; i-- { + // s0 := 
preprocessing(s.S0.Values[len(s.S0.Values)-i : len(s.S0.Values)-i+20-outlook]) + // s1 := preprocessing(s.S1.Values[len(s.S1.Values)-i : len(s.S1.Values)-i+20-outlook]) + // s2 := preprocessing(s.S2.Values[len(s.S2.Values)-i : len(s.S2.Values)-i+20-outlook]) + // //s3 := s.S3.Values[len(s.S3.Values)-i-1] + // s4 := preprocessing(s.S4.Values[len(s.S4.Values)-i : len(s.S4.Values)-i+20-outlook]) + // s5 := preprocessing(s.S5.Values[len(s.S5.Values)-i : len(s.S5.Values)-i+20-outlook]) + // a2 := preprocessing(s.A2.Values[len(s.A2.Values)-i : len(s.A2.Values)-i+20-outlook]) + // a3 := preprocessing(s.A3.Values[len(s.A3.Values)-i : len(s.A3.Values)-i+20-outlook]) + // a18 := preprocessing(s.A18.Values[len(s.A18.Values)-i : len(s.A18.Values)-i+20-outlook]) + // a34 := preprocessing(s.A18.Values[len(s.A18.Values)-i : len(s.A18.Values)-i+20-outlook]) + // + // ret := s.R.Values[len(s.R.Values)-i] + // rdps = append(rdps, regression.DataPoint(ret, types.Float64Slice{s0, s1, s2, s4, s5, a2, a3, a18, a34})) + // } + r.Train(rdps...) + r.Run() + er, _ := r.Predict(types.Float64Slice{s.S0.Last(), s.S1.Last(), s.S2.Last(), s.S4.Last(), s.S5.Last(), s.S6.Last(), s.S7.Last(), s.A2.Last(), s.A3.Last(), s.A18.Last(), s.A34.Last()}) + log.Infof("Expected Return Rate: %f", er) + + q := new(regression.Regression) + q.SetObserved("Order Quantity Per Interval") + q.SetVar(0, "S0") + q.SetVar(1, "S1") + q.SetVar(2, "S2") + // q.SetVar(2, "S3") + q.SetVar(3, "S4") + q.SetVar(4, "S5") + q.SetVar(5, "S6") + q.SetVar(6, "S7") + q.SetVar(7, "A2") + q.SetVar(8, "A3") + q.SetVar(9, "A18") + q.SetVar(10, "A34") + + var qdps regression.DataPoints + + for i := 1; i <= 15; i++ { + s0 := math.Pow(s.S0.Values[len(s.S0.Values)-i-outlook], 1) + s1 := math.Pow(s.S1.Values[len(s.S1.Values)-i-outlook], 1) + s2 := math.Pow(s.S2.Values[len(s.S2.Values)-i-outlook], 1) + // s3 := s.S3.Values[len(s.S3.Values)-i-1] + s4 := math.Pow(s.S4.Values[len(s.S4.Values)-i-outlook], 1) + s5 := math.Pow(s.S5.Values[len(s.S5.Values)-i-outlook], 1) + s6 := s.S6.Values[len(s.S6.Values)-i-outlook] + s7 := s.S7.Values[len(s.S7.Values)-i-outlook] + a2 := math.Pow(s.A2.Values[len(s.A2.Values)-i-outlook], 1) + a3 := math.Pow(s.A3.Values[len(s.A3.Values)-i-outlook], 1) + a18 := math.Pow(s.A18.Values[len(s.A18.Values)-i-outlook], 1) + a34 := math.Pow(s.A34.Values[len(s.A34.Values)-i-outlook], 1) + + ret := s.R.Values[len(s.R.Values)-i] + qty := math.Abs(ret) + qdps = append(qdps, regression.DataPoint(qty, types.Float64Slice{s0, s1, s2, s4, s5, s6, s7, a2, a3, a18, a34})) + } + // for i := 40; i > 20; i-- { + // s0 := preprocessing(s.S0.Values[len(s.S0.Values)-i : len(s.S0.Values)-i+20-outlook]) + // s1 := preprocessing(s.S1.Values[len(s.S1.Values)-i : len(s.S1.Values)-i+20-outlook]) + // s2 := preprocessing(s.S2.Values[len(s.S2.Values)-i : len(s.S2.Values)-i+20-outlook]) + // //s3 := s.S3.Values[len(s.S3.Values)-i-1] + // s4 := preprocessing(s.S4.Values[len(s.S4.Values)-i : len(s.S4.Values)-i+20-outlook]) + // s5 := preprocessing(s.S5.Values[len(s.S5.Values)-i : len(s.S5.Values)-i+20-outlook]) + // a2 := preprocessing(s.A2.Values[len(s.A2.Values)-i : len(s.A2.Values)-i+20-outlook]) + // a3 := preprocessing(s.A3.Values[len(s.A3.Values)-i : len(s.A3.Values)-i+20-outlook]) + // a18 := preprocessing(s.A18.Values[len(s.A18.Values)-i : len(s.A18.Values)-i+20-outlook]) + // a34 := preprocessing(s.A18.Values[len(s.A18.Values)-i : len(s.A18.Values)-i+20-outlook]) + // + // ret := s.R.Values[len(s.R.Values)-i] + // qty := math.Abs(ret) + // qdps = append(qdps, 
regression.DataPoint(qty, types.Float64Slice{s0, s1, s2, s4, s5, a2, a3, a18, a34})) + // } + q.Train(qdps...) + + q.Run() + + log.Info(s.S0.Last(), s.S1.Last(), s.S2.Last(), s.S3.Last(), s.S4.Last(), s.S5.Last(), s.S6.Last(), s.S7.Last(), s.A2.Last(), s.A3.Last(), s.A18.Last(), s.A34.Last()) + + log.Infof("Return Rate Regression formula:\n%v", r.Formula) + log.Infof("Order Quantity Regression formula:\n%v", q.Formula) + + // s0 := preprocessing(s.S0.Values[len(s.S0.Values)-20 : len(s.S0.Values)-1]) + // s1 := preprocessing(s.S1.Values[len(s.S1.Values)-20 : len(s.S1.Values)-1-outlook]) + // s2 := preprocessing(s.S2.Values[len(s.S2.Values)-20 : len(s.S2.Values)-1-outlook]) + // //s3 := s.S3.Values[len(s.S3.Values)-i-1] + // s4 := preprocessing(s.S4.Values[len(s.S4.Values)-20 : len(s.S4.Values)-1-outlook]) + // s5 := preprocessing(s.S5.Values[len(s.S5.Values)-20 : len(s.S5.Values)-1-outlook]) + // a2 := preprocessing(s.A2.Values[len(s.A2.Values)-20 : len(s.A2.Values)-1-outlook]) + // a3 := preprocessing(s.A3.Values[len(s.A3.Values)-20 : len(s.A3.Values)-1-outlook]) + // a18 := preprocessing(s.A18.Values[len(s.A18.Values)-20 : len(s.A18.Values)-1-outlook]) + // a34 := preprocessing(s.A18.Values[len(s.A18.Values)-20 : len(s.A18.Values)-1-outlook]) + // er, _ := r.Predict(types.Float64Slice{s0, s1, s2, s4, s5, a2, a3, a18, a34}) + // eq, _ := q.Predict(types.Float64Slice{s0, s1, s2, s4, s5, a2, a3, a18, a34}) + eq, _ := q.Predict(types.Float64Slice{s.S0.Last(), s.S1.Last(), s.S2.Last(), s.S4.Last(), s.S5.Last(), s.S6.Last(), s.S7.Last(), s.A2.Last(), s.A3.Last(), s.A18.Last(), s.A34.Last(), er}) + log.Infof("Expected Order Quantity: %f", eq) + // if float64(s.Position.GetBase().Sign())*er < 0 { + // s.ClosePosition(ctx, fixedpoint.One, kline.Close) + // s.tradeCollector.Process() + // } + // prevEr = er + + // spd := s.Spread.Float64() + + // inventory = m * alpha + spread + AskAlphaBoundary := (s.Position.GetBase().Mul(kline.Close).Float64() - 100) / 10000 + BidAlphaBoundary := (s.Position.GetBase().Mul(kline.Close).Float64() + 100) / 10000 + + log.Info(s.Position.GetBase().Mul(kline.Close).Float64(), AskAlphaBoundary, er, BidAlphaBoundary) + + BidPrice := kline.Close.Mul(fixedpoint.One.Sub(s.Spread)) + BidQty := s.QuantityOrAmount.CalculateQuantity(BidPrice) + BidQty = BidQty // .Mul(fixedpoint.One.Add(fixedpoint.NewFromFloat(eq))) + + AskPrice := kline.Close.Mul(fixedpoint.One.Add(s.Spread)) + AskQty := s.QuantityOrAmount.CalculateQuantity(AskPrice) + AskQty = AskQty // .Mul(fixedpoint.One.Add(fixedpoint.NewFromFloat(eq))) + + if er > 0 || (er < 0 && er > AskAlphaBoundary/kline.Close.Float64()) { + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeLimitMaker, + Price: BidPrice, + Quantity: BidQty, // 0.0005 + } + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place orders") + } + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + s.tradeCollector.Process() + + // submitOrder = types.SubmitOrder{ + // Symbol: s.Symbol, + // Side: types.SideTypeSell, + // Type: types.OrderTypeLimitMaker, + // Price: kline.Close.Mul(fixedpoint.One.Add(s.Spread)), + // Quantity: fixedpoint.NewFromFloat(math.Max(math.Min(eq, 0.003), 0.0005)), //0.0005 + // } + // createdOrders, err = orderExecutor.SubmitOrders(ctx, submitOrder) + // if err != nil { + // log.WithError(err).Errorf("can not place orders") + // } + // s.orderStore.Add(createdOrders...) 
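			// For reference, the r and q models above follow the usual train/predict cycle of
			// github.com/sajari/regression; a minimal sketch with made-up numbers, separate from
			// this strategy's factor values:
			//
			//   m := new(regression.Regression)
			//   m.SetObserved("y")
			//   m.SetVar(0, "x")
			//   m.Train(
			//       regression.DataPoint(1.0, []float64{2.0}),
			//       regression.DataPoint(2.0, []float64{4.0}),
			//   )
			//   _ = m.Run()                       // fit coefficients, fills m.Formula
			//   y, _ := m.Predict([]float64{3.0}) // fitted value of y at x = 3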
+ // s.activeMakerOrders.Add(createdOrders...) + // s.tradeCollector.Process() + } + if er < 0 || (er > 0 && er < BidAlphaBoundary/kline.Close.Float64()) { + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeLimitMaker, + Price: AskPrice, + Quantity: AskQty, // 0.0005 + } + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place orders") + } + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) + s.tradeCollector.Process() + + // submitOrder = types.SubmitOrder{ + // Symbol: s.Symbol, + // Side: types.SideTypeBuy, + // Type: types.OrderTypeLimitMaker, + // Price: kline.Close.Mul(fixedpoint.One.Sub(s.Spread)), + // Quantity: fixedpoint.NewFromFloat(math.Max(math.Min(eq, 0.003), 0.0005)), //0.0005 + // } + // createdOrders, err = orderExecutor.SubmitOrders(ctx, submitOrder) + // if err != nil { + // log.WithError(err).Errorf("can not place orders") + // } + // s.orderStore.Add(createdOrders...) + // s.activeMakerOrders.Add(createdOrders...) + // s.tradeCollector.Process() + } + + }) + + return nil +} + +func tanh(x float64) float64 { + y := (math.Exp(x) - math.Exp(-x)) / (math.Exp(x) + math.Exp(-x)) + return y +} + +func mean(xs []float64) float64 { + return floats.Sum(xs) / float64(len(xs)) +} + +func stddev(xs []float64) float64 { + mu := mean(xs) + squaresum := 0. + for _, x := range xs { + squaresum += (x - mu) * (x - mu) + } + return math.Sqrt(squaresum / float64(len(xs)-1)) +} + +func preprocessing(xs []float64) float64 { + // return 0.5 * tanh(0.01*((xs[len(xs)-1]-mean(xs))/stddev(xs))) // tanh estimator + return tanh((xs[len(xs)-1] - mean(xs)) / stddev(xs)) // tanh z-score + return (xs[len(xs)-1] - mean(xs)) / stddev(xs) // z-score +} diff --git a/pkg/strategy/funding/strategy.go b/pkg/strategy/funding/strategy.go new file mode 100644 index 0000000000..83d0a871df --- /dev/null +++ b/pkg/strategy/funding/strategy.go @@ -0,0 +1,211 @@ +package funding + +import ( + "context" + "errors" + "fmt" + "strings" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/exchange/binance" + "github.com/c9s/bbgo/pkg/fixedpoint" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "funding" + +var log = logrus.WithField("strategy", ID) + +func init() { + // Register the pointer of the strategy struct, + // so that bbgo knows what struct to be used to unmarshal the configs (YAML or JSON) + // Note: built-in strategies need to imported manually in the bbgo cmd package. 
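	// A minimal sketch of that manual import in the cmd package; the file name and the
	// exact list of paths are illustrative, not taken from this patch:
	//
	//   // e.g. pkg/cmd/strategy/builtin.go
	//   import (
	//       _ "github.com/c9s/bbgo/pkg/strategy/funding"
	//       _ "github.com/c9s/bbgo/pkg/strategy/grid"
	//   )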
+ bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + // These fields will be filled from the config file (it translates YAML to JSON) + Symbol string `json:"symbol"` + Market types.Market `json:"-"` + Quantity fixedpoint.Value `json:"quantity,omitempty"` + MaxExposurePosition fixedpoint.Value `json:"maxExposurePosition"` + //Interval types.Interval `json:"interval"` + + FundingRate *struct { + High fixedpoint.Value `json:"high"` + Neutral fixedpoint.Value `json:"neutral"` + DiffThreshold fixedpoint.Value `json:"diffThreshold"` + } `json:"fundingRate"` + + SupportDetection []struct { + Interval types.Interval `json:"interval"` + // MovingAverageType is the moving average indicator type that we want to use, + // it could be SMA or EWMA + MovingAverageType string `json:"movingAverageType"` + + // MovingAverageInterval is the interval of k-lines for the moving average indicator to calculate, + // it could be "1m", "5m", "1h" and so on. note that, the moving averages are calculated from + // the k-line data we subscribed + //MovingAverageInterval types.Interval `json:"movingAverageInterval"` + // + //// MovingAverageWindow is the number of the window size of the moving average indicator. + //// The number of k-lines in the window. generally used window sizes are 7, 25 and 99 in the TradingView. + //MovingAverageWindow int `json:"movingAverageWindow"` + + MovingAverageIntervalWindow types.IntervalWindow `json:"movingAverageIntervalWindow"` + + MinVolume fixedpoint.Value `json:"minVolume"` + + MinQuoteVolume fixedpoint.Value `json:"minQuoteVolume"` + } `json:"supportDetection"` +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + // session.Subscribe(types.BookChannel, s.Symbol, types.SubscribeOptions{}) + + //session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + // Interval: string(s.Interval), + //}) + + for _, detection := range s.SupportDetection { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + Interval: detection.Interval, + }) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + Interval: detection.MovingAverageIntervalWindow.Interval, + }) + } +} + +func (s *Strategy) Validate() error { + if len(s.Symbol) == 0 { + return errors.New("symbol is required") + } + + return nil +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + + standardIndicatorSet, ok := session.StandardIndicatorSet(s.Symbol) + if !ok { + return fmt.Errorf("standardIndicatorSet is nil, symbol %s", s.Symbol) + } + //binanceExchange, ok := session.Exchange.(*binance.Exchange) + //if !ok { + // log.Error("exchange failed") + //} + if !session.Futures { + log.Error("futures not enabled in config for this strategy") + return nil + } + + //if s.FundingRate != nil { + // go s.listenToFundingRate(ctx, binanceExchange) + //} + premiumIndex, err := session.Exchange.(*binance.Exchange).QueryPremiumIndex(ctx, s.Symbol) + if err != nil { + log.Error("exchange does not support funding rate api") + } + + var ma types.Float64Indicator + for _, detection := range s.SupportDetection { + + switch strings.ToLower(detection.MovingAverageType) { + case "sma": + ma = standardIndicatorSet.SMA(types.IntervalWindow{ + Interval: detection.MovingAverageIntervalWindow.Interval, + Window: detection.MovingAverageIntervalWindow.Window, + }) + case "ema", "ewma": + ma = standardIndicatorSet.EWMA(types.IntervalWindow{ + Interval: 
detection.MovingAverageIntervalWindow.Interval, + Window: detection.MovingAverageIntervalWindow.Window, + }) + default: + ma = standardIndicatorSet.EWMA(types.IntervalWindow{ + Interval: detection.MovingAverageIntervalWindow.Interval, + Window: detection.MovingAverageIntervalWindow.Window, + }) + } + + } + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // skip k-lines from other symbols + if kline.Symbol != s.Symbol { + return + } + for _, detection := range s.SupportDetection { + var lastMA = ma.Last() + + closePrice := kline.GetClose() + closePriceF := closePrice.Float64() + // skip if the closed price is under the moving average + if closePriceF < lastMA { + log.Infof("skip %s closed price %v < last ma %f", s.Symbol, closePrice, lastMA) + return + } + + fundingRate := premiumIndex.LastFundingRate + + if fundingRate.Compare(s.FundingRate.High) >= 0 { + bbgo.Notify("%s funding rate %s is too high! threshold %s", + s.Symbol, + fundingRate.Percentage(), + s.FundingRate.High.Percentage(), + ) + } else { + log.Infof("skip funding rate is too low") + return + } + + prettyBaseVolume := s.Market.BaseCurrencyFormatter() + prettyQuoteVolume := s.Market.QuoteCurrencyFormatter() + + if detection.MinVolume.Sign() > 0 && kline.Volume.Compare(detection.MinVolume) > 0 { + bbgo.Notify("Detected %s %s resistance base volume %s > min base volume %s, quote volume %s", + s.Symbol, detection.Interval.String(), + prettyBaseVolume.FormatMoney(kline.Volume.Trunc()), + prettyBaseVolume.FormatMoney(detection.MinVolume.Trunc()), + prettyQuoteVolume.FormatMoney(kline.QuoteVolume.Trunc()), + ) + bbgo.Notify(kline) + + baseBalance, ok := session.GetAccount().Balance(s.Market.BaseCurrency) + if !ok { + return + } + + if baseBalance.Available.Sign() > 0 && baseBalance.Total().Compare(s.MaxExposurePosition) < 0 { + log.Infof("opening a short position") + _, err := orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ + Symbol: kline.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeMarket, + Quantity: s.Quantity, + }) + if err != nil { + log.WithError(err).Error("submit order error") + } + } + } else if detection.MinQuoteVolume.Sign() > 0 && kline.QuoteVolume.Compare(detection.MinQuoteVolume) > 0 { + bbgo.Notify("Detected %s %s resistance quote volume %s > min quote volume %s, base volume %s", + s.Symbol, detection.Interval.String(), + prettyQuoteVolume.FormatMoney(kline.QuoteVolume.Trunc()), + prettyQuoteVolume.FormatMoney(detection.MinQuoteVolume.Trunc()), + prettyBaseVolume.FormatMoney(kline.Volume.Trunc()), + ) + bbgo.Notify(kline) + } + } + }) + return nil +} diff --git a/pkg/strategy/grid/strategy.go b/pkg/strategy/grid/strategy.go index 2d4252c3af..75e7817831 100644 --- a/pkg/strategy/grid/strategy.go +++ b/pkg/strategy/grid/strategy.go @@ -5,173 +5,423 @@ import ( "fmt" "sync" + "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/c9s/bbgo/pkg/bbgo" "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/service" "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" ) -var log = logrus.WithField("strategy", "grid") +const ID = "grid" + +var log = logrus.WithField("strategy", ID) + +var NotionalModifier = fixedpoint.NewFromFloat(1.0001) func init() { // Register the pointer of the strategy struct, // so that bbgo knows what struct to be used to unmarshal the configs (YAML or JSON) // Note: built-in strategies need to imported manually in the bbgo cmd package. 
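	// Note on NotionalModifier above: handleFilledOrder multiplies Market.MinNotional by it
	// when resizing arbitrage orders, presumably so the adjusted quantity*price stays strictly
	// above the exchange minimum after rounding (a 10 USDT minimum effectively becomes a
	// 10.001 USDT floor).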
- bbgo.RegisterStrategy("grid", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +// State is the grid snapshot +type State struct { + Orders []types.SubmitOrder `json:"orders,omitempty"` + FilledBuyGrids map[fixedpoint.Value]struct{} `json:"filledBuyGrids"` + FilledSellGrids map[fixedpoint.Value]struct{} `json:"filledSellGrids"` + Position *types.Position `json:"position,omitempty"` + + AccumulativeArbitrageProfit fixedpoint.Value `json:"accumulativeArbitrageProfit"` + + // any created orders for tracking trades + // [source Order ID] -> arbitrage order + ArbitrageOrders map[uint64]types.Order `json:"arbitrageOrders"` + + ProfitStats types.ProfitStats `json:"profitStats,omitempty"` } type Strategy struct { - // The notification system will be injected into the strategy automatically. - // This field will be injected automatically since it's a single exchange strategy. - *bbgo.Notifiability + *bbgo.Graceful `json:"-" yaml:"-"` - *bbgo.Graceful + *bbgo.Persistence // OrderExecutor is an interface for submitting order. // This field will be injected automatically since it's a single exchange strategy. - bbgo.OrderExecutor - - orderStore *bbgo.OrderStore + bbgo.OrderExecutor `json:"-" yaml:"-"` // Market stores the configuration of the market, for example, VolumePrecision, PricePrecision, MinLotSize... etc // This field will be injected automatically since we defined the Symbol field. - types.Market + types.Market `json:"-" yaml:"-"` + + TradeService *service.TradeService `json:"-" yaml:"-"` // These fields will be filled from the config file (it translates YAML to JSON) - Symbol string `json:"symbol"` + Symbol string `json:"symbol" yaml:"symbol"` // ProfitSpread is the fixed profit spread you want to submit the sell order - ProfitSpread fixedpoint.Value `json:"profitSpread"` + ProfitSpread fixedpoint.Value `json:"profitSpread" yaml:"profitSpread"` // GridNum is the grid number, how many orders you want to post on the orderbook. - GridNum int `json:"gridNumber"` + GridNum int64 `json:"gridNumber" yaml:"gridNumber"` - UpperPrice fixedpoint.Value `json:"upperPrice"` + UpperPrice fixedpoint.Value `json:"upperPrice" yaml:"upperPrice"` - LowerPrice fixedpoint.Value `json:"lowerPrice"` + LowerPrice fixedpoint.Value `json:"lowerPrice" yaml:"lowerPrice"` // Quantity is the quantity you want to submit for each order. - Quantity float64 `json:"quantity"` + Quantity fixedpoint.Value `json:"quantity,omitempty"` + + // QuantityScale helps user to define the quantity by price scale or volume scale + QuantityScale *bbgo.PriceVolumeScale `json:"quantityScale,omitempty"` + + // FixedAmount is used for fixed amount (dynamic quantity) if you don't want to use fixed quantity. + FixedAmount fixedpoint.Value `json:"amount,omitempty" yaml:"amount"` - // OrderAmount is used for fixed amount (dynamic quantity) if you don't want to use fixed quantity. - OrderAmount fixedpoint.Value `json:"orderAmount"` + // Side is the initial maker orders side. defaults to "both" + Side types.SideType `json:"side" yaml:"side"` + + // CatchUp let the maker grid catch up with the price change. + CatchUp bool `json:"catchUp" yaml:"catchUp"` // Long means you want to hold more base asset than the quote asset. - Long bool `json:"long"` + Long bool `json:"long,omitempty" yaml:"long,omitempty"` + + state *State + + // orderStore is used to store all the created orders, so that we can filter the trades. + orderStore *bbgo.OrderStore // activeOrders is the locally maintained active order book of the maker orders. 
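	// Its OnFilled callback (bound to handleFilledOrder below) is what keeps the grid cycling:
	// a filled buy is answered with a limit sell one ProfitSpread above, and a filled sell with
	// a limit buy one ProfitSpread below. For example, with profitSpread=5, a buy filled at
	// 1200 produces a counter sell at 1205.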
- activeOrders *bbgo.LocalActiveOrderBook + activeOrders *bbgo.ActiveOrderBook - position fixedpoint.Value + tradeCollector *bbgo.TradeCollector - // any created orders for tracking trades - orders map[uint64]types.Order + // groupID is the group ID used for the strategy instance for canceling orders + groupID uint32 } -func (s *Strategy) placeGridOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { - log.Infof("placing grid orders...") +func (s *Strategy) ID() string { + return ID +} - quoteCurrency := s.Market.QuoteCurrency - balances := session.Account.Balances() +func (s *Strategy) Validate() error { + if s.UpperPrice.IsZero() { + return errors.New("upperPrice can not be zero, you forgot to set?") + } + if s.LowerPrice.IsZero() { + return errors.New("lowerPrice can not be zero, you forgot to set?") + } + if s.UpperPrice.Compare(s.LowerPrice) <= 0 { + return fmt.Errorf("upperPrice (%s) should not be less than or equal to lowerPrice (%s)", s.UpperPrice.String(), s.LowerPrice.String()) + } + if s.ProfitSpread.Sign() <= 0 { + // If profitSpread is empty or its value is negative + return fmt.Errorf("profit spread should bigger than 0") + } + + if s.Quantity.IsZero() && s.QuantityScale == nil && s.FixedAmount.IsZero() { + return fmt.Errorf("amount, quantity or scaleQuantity can not be zero") + } + + return nil +} + +func (s *Strategy) generateGridSellOrders(session *bbgo.ExchangeSession) ([]types.SubmitOrder, error) { currentPrice, ok := session.LastPrice(s.Symbol) if !ok { - log.Warn("last price not found, skipping") - return + return nil, fmt.Errorf("can not generate sell orders, %s last price not found", s.Symbol) } - currentPriceF := fixedpoint.NewFromFloat(currentPrice) - priceRange := s.UpperPrice - s.LowerPrice - gridSize := priceRange.Div(fixedpoint.NewFromInt(s.GridNum)) + if currentPrice.Compare(s.UpperPrice) > 0 { + return nil, fmt.Errorf("can not generate sell orders, the current price %s is higher than upper price %s", currentPrice.String(), s.UpperPrice.String()) + } + + priceRange := s.UpperPrice.Sub(s.LowerPrice) + numGrids := fixedpoint.NewFromInt(s.GridNum) + gridSpread := priceRange.Div(numGrids) + + if gridSpread.IsZero() { + return nil, fmt.Errorf( + "either numGrids(%v) is too big or priceRange(%v) is too small, "+ + "the differences of grid prices become zero", numGrids, priceRange) + } + + // find the nearest grid price from the current price + startPrice := fixedpoint.Max( + s.LowerPrice, + s.UpperPrice.Sub( + s.UpperPrice.Sub(currentPrice).Div(gridSpread).Trunc().Mul(gridSpread))) - var bidOrders []types.SubmitOrder - var askOrders []types.SubmitOrder + if startPrice.Compare(s.UpperPrice) > 0 { + return nil, fmt.Errorf("current price %v exceeded the upper price boundary %v", + currentPrice, + s.UpperPrice) + } + balances := session.GetAccount().Balances() baseBalance, ok := balances[s.Market.BaseCurrency] - if ok && baseBalance.Available > 0 { - log.Infof("placing sell order from %f ~ %f per grid %f", (currentPriceF + gridSize).Float64(), s.UpperPrice.Float64(), gridSize.Float64()) - for price := currentPriceF + gridSize; price <= s.UpperPrice; price += gridSize { - order := types.SubmitOrder{ - Symbol: s.Symbol, - Side: types.SideTypeSell, - Type: types.OrderTypeLimit, - Market: s.Market, - Quantity: s.Quantity, - Price: price.Float64(), - TimeInForce: "GTC", + if !ok { + return nil, fmt.Errorf("base balance %s not found", s.Market.BaseCurrency) + } + + if baseBalance.Available.IsZero() { + return nil, fmt.Errorf("base balance %s is zero: %s", 
+ s.Market.BaseCurrency, baseBalance.String()) + } + + log.Infof("placing grid sell orders from %s ~ %s, grid spread %s", + startPrice.String(), + s.UpperPrice.String(), + gridSpread.String()) + + var orders []types.SubmitOrder + for price := startPrice; price.Compare(s.UpperPrice) <= 0; price = price.Add(gridSpread) { + var quantity fixedpoint.Value + if s.Quantity.Sign() > 0 { + quantity = s.Quantity + } else if s.QuantityScale != nil { + qf, err := s.QuantityScale.Scale(price.Float64(), 0) + if err != nil { + return nil, err } - askOrders = append(askOrders, order) + quantity = fixedpoint.NewFromFloat(qf) + } else if s.FixedAmount.Sign() > 0 { + quantity = s.FixedAmount.Div(price) + } + + // quoteQuantity := price.Mul(quantity) + if baseBalance.Available.Compare(quantity) < 0 { + return orders, fmt.Errorf("base balance %s %s is not enough, stop generating sell orders", + baseBalance.Currency, + baseBalance.Available.String()) } - } else { - log.Warnf("base balance is not enough, we can't place ask orders") + + if _, filled := s.state.FilledSellGrids[price]; filled { + log.Debugf("sell grid at price %s is already filled, skipping", price.String()) + continue + } + + orders = append(orders, types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeLimit, + Market: s.Market, + Quantity: quantity, + Price: price.Add(s.ProfitSpread), + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + }) + baseBalance.Available = baseBalance.Available.Sub(quantity) + + s.state.FilledSellGrids[price] = struct{}{} + } + + return orders, nil +} + +func (s *Strategy) generateGridBuyOrders(session *bbgo.ExchangeSession) ([]types.SubmitOrder, error) { + // session.Exchange.QueryTicker() + currentPrice, ok := session.LastPrice(s.Symbol) + if !ok { + return nil, fmt.Errorf("%s last price not found, skipping", s.Symbol) + } + + if currentPrice.Compare(s.LowerPrice) < 0 { + return nil, fmt.Errorf("current price %v is lower than the lower price %v", + currentPrice, s.LowerPrice) + } + + priceRange := s.UpperPrice.Sub(s.LowerPrice) + numGrids := fixedpoint.NewFromInt(s.GridNum) + gridSpread := priceRange.Div(numGrids) + + if gridSpread.IsZero() { + return nil, fmt.Errorf( + "either numGrids(%v) is too big or priceRange(%v) is too small, "+ + "the differences of grid prices become zero", numGrids, priceRange) + } + + // Find the nearest grid price for placing buy orders: + // buyRange = currentPrice - lowerPrice + // numOfBuyGrids = Floor(buyRange / gridSpread) + // startPrice = lowerPrice + numOfBuyGrids * gridSpread + // priceOfBuyOrder1 = startPrice + // priceOfBuyOrder2 = startPrice - gridSpread + // priceOfBuyOrder3 = startPrice - gridSpread * 2 + startPrice := fixedpoint.Min( + s.UpperPrice, + s.LowerPrice.Add( + currentPrice.Sub(s.LowerPrice).Div(gridSpread).Trunc().Mul(gridSpread))) + + if startPrice.Compare(s.LowerPrice) < 0 { + return nil, fmt.Errorf("current price %v exceeded the lower price boundary %v", + currentPrice, + s.UpperPrice) + } + + balances := session.GetAccount().Balances() + balance, ok := balances[s.Market.QuoteCurrency] + if !ok { + return nil, fmt.Errorf("quote balance %s not found", s.Market.QuoteCurrency) } - quoteBalance, ok := balances[quoteCurrency] - if ok && quoteBalance.Available > 0 { - log.Infof("placing buy order from %f ~ %f per grid %f", (currentPriceF - gridSize).Float64(), s.LowerPrice.Float64(), gridSize.Float64()) + if balance.Available.IsZero() { + return nil, fmt.Errorf("quote balance %s is zero: %v", s.Market.QuoteCurrency, balance) 
+ } - for price := currentPriceF - gridSize; price >= s.LowerPrice; price -= gridSize { - order := types.SubmitOrder{ - Symbol: s.Symbol, - Side: types.SideTypeBuy, - Type: types.OrderTypeLimit, - Market: s.Market, - Quantity: s.Quantity, - Price: price.Float64(), - TimeInForce: "GTC", + log.Infof("placing grid buy orders from %v to %v, grid spread %v", + startPrice, + s.LowerPrice, + gridSpread) + + var orders []types.SubmitOrder + for price := startPrice; s.LowerPrice.Compare(price) <= 0; price = price.Sub(gridSpread) { + var quantity fixedpoint.Value + if s.Quantity.Sign() > 0 { + quantity = s.Quantity + } else if s.QuantityScale != nil { + qf, err := s.QuantityScale.Scale(price.Float64(), 0) + if err != nil { + return nil, err } - bidOrders = append(bidOrders, order) + quantity = fixedpoint.NewFromFloat(qf) + } else if s.FixedAmount.Sign() > 0 { + quantity = s.FixedAmount.Div(price) + } + + quoteQuantity := price.Mul(quantity) + if balance.Available.Compare(quoteQuantity) < 0 { + return orders, fmt.Errorf("quote balance %s %v is not enough for %v, stop generating buy orders", + balance.Currency, + balance.Available, + quoteQuantity) + } + + if _, filled := s.state.FilledBuyGrids[price]; filled { + log.Debugf("buy grid at price %v is already filled, skipping", price) + continue } - } else { - log.Warnf("quote balance is not enough, we can't place bid orders") + + orders = append(orders, types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeLimit, + Market: s.Market, + Quantity: quantity, + Price: price, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + }) + balance.Available = balance.Available.Sub(quoteQuantity) + + s.state.FilledBuyGrids[price] = struct{}{} } - createdOrders, err := orderExecutor.SubmitOrders(context.Background(), append(bidOrders, askOrders...)...) - if err != nil { - log.WithError(err).Errorf("can not place orders") - return + return orders, nil +} + +func (s *Strategy) placeGridSellOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + orderForms, err := s.generateGridSellOrders(session) + + if len(orderForms) == 0 { + if err != nil { + return err + } + + return errors.New("none of sell order is generated") } + log.Infof("submitting %d sell orders...", len(orderForms)) + createdOrders, err := orderExecutor.SubmitOrders(context.Background(), orderForms...) s.activeOrders.Add(createdOrders...) + return err } -func (s *Strategy) tradeUpdateHandler(trade types.Trade) { - if trade.Symbol != s.Symbol { - return - } +func (s *Strategy) placeGridBuyOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + orderForms, err := s.generateGridBuyOrders(session) - if s.orderStore.Exists(trade.OrderID) { - log.Infof("received trade update of order %d: %+v", trade.OrderID, trade) - switch trade.Side { - case types.SideTypeBuy: - s.position.AtomicAdd(fixedpoint.NewFromFloat(trade.Quantity)) - case types.SideTypeSell: - s.position.AtomicAdd(-fixedpoint.NewFromFloat(trade.Quantity)) + if len(orderForms) == 0 { + if err != nil { + return err } + + return errors.New("none of buy order is generated") } + + log.Infof("submitting %d buy orders...", len(orderForms)) + createdOrders, err := orderExecutor.SubmitOrders(context.Background(), orderForms...) + s.activeOrders.Add(createdOrders...) 
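	// A worked example of the grid arithmetic in generateGridSellOrders / generateGridBuyOrders
	// above, using illustrative numbers only (lowerPrice=1000, upperPrice=2000, gridNumber=10,
	// current price=1234):
	//
	//   gridSpread = (2000 - 1000) / 10 = 100
	//   buy side:   startPrice = 1000 + Trunc((1234-1000)/100)*100 = 1200
	//               buy orders at 1200, 1100, 1000
	//   sell side:  startPrice = 2000 - Trunc((2000-1234)/100)*100 = 1300
	//               sell orders at 1300+profitSpread, 1400+profitSpread, ..., 2000+profitSpread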
+ + return err } -func (s *Strategy) submitReverseOrder(order types.Order) { - var side = order.Side.Reverse() - var price = order.Price - var quantity = order.Quantity +func (s *Strategy) placeGridOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { + log.Infof("placing grid orders on side %s...", s.Side) + + switch s.Side { + case types.SideTypeBuy: + if err := s.placeGridBuyOrders(orderExecutor, session); err != nil { + log.Warn(err.Error()) + } + + case types.SideTypeSell: + if err := s.placeGridSellOrders(orderExecutor, session); err != nil { + log.Warn(err.Error()) + } + + case types.SideTypeBoth: + if err := s.placeGridSellOrders(orderExecutor, session); err != nil { + log.Warn(err.Error()) + } + + if err := s.placeGridBuyOrders(orderExecutor, session); err != nil { + log.Warn(err.Error()) + } + + default: + log.Errorf("invalid side %s", s.Side) + } +} + +func (s *Strategy) handleFilledOrder(filledOrder types.Order) { + // generate arbitrage order + var side = filledOrder.Side.Reverse() + var price = filledOrder.Price + var quantity = filledOrder.Quantity + var amount = filledOrder.Price.Mul(filledOrder.Quantity) switch side { case types.SideTypeSell: - price += s.ProfitSpread.Float64() + price = price.Add(s.ProfitSpread) case types.SideTypeBuy: - price -= s.ProfitSpread.Float64() + price = price.Sub(s.ProfitSpread) } - if s.OrderAmount > 0 { - quantity = s.OrderAmount.Float64() / price + if s.FixedAmount.Sign() > 0 { + quantity = s.FixedAmount.Div(price) } else if s.Long { // long = use the same amount to buy more quantity back - // the original amount - var amount = order.Price * order.Quantity - quantity = amount / price + quantity = amount.Div(price) + amount = quantity.Mul(price) + } + + if quantity.Compare(s.Market.MinQuantity) < 0 { + quantity = s.Market.MinQuantity + amount = quantity.Mul(price) + } + + if amount.Compare(s.Market.MinNotional) <= 0 { + quantity = bbgo.AdjustFloatQuantityByMinAmount( + quantity, price, s.Market.MinNotional.Mul(NotionalModifier)) + + // update amount + amount = quantity.Mul(price) } submitOrder := types.SubmitOrder{ @@ -180,56 +430,237 @@ func (s *Strategy) submitReverseOrder(order types.Order) { Type: types.OrderTypeLimit, Quantity: quantity, Price: price, - TimeInForce: "GTC", + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, } - log.Infof("submitting reverse order: %s against %s", submitOrder.String(), order.String()) + log.Infof("submitting arbitrage order: %v against filled order %v", submitOrder, filledOrder) createdOrders, err := s.OrderExecutor.SubmitOrders(context.Background(), submitOrder) - if err != nil { - log.WithError(err).Errorf("can not place orders") - return + + // create one-way link from the newly created orders + for _, o := range createdOrders { + s.state.ArbitrageOrders[o.OrderID] = filledOrder } s.orderStore.Add(createdOrders...) s.activeOrders.Add(createdOrders...) + + if err != nil { + log.WithError(err).Errorf("can not place orders: %+v", submitOrder) + return + } + + // calculate arbitrage profit + // TODO: apply fee rate here + if s.Long { + switch filledOrder.Side { + case types.SideTypeSell: + if buyOrder, ok := s.state.ArbitrageOrders[filledOrder.OrderID]; ok { + // use base asset quantity here + baseProfit := buyOrder.Quantity.Sub(filledOrder.Quantity) + s.state.AccumulativeArbitrageProfit = s.state.AccumulativeArbitrageProfit. 
+ Add(baseProfit) + bbgo.Notify("%s grid arbitrage profit %v %s, accumulative arbitrage profit %v %s", + s.Symbol, + baseProfit, s.Market.BaseCurrency, + s.state.AccumulativeArbitrageProfit, s.Market.BaseCurrency, + ) + } + + case types.SideTypeBuy: + if sellOrder, ok := s.state.ArbitrageOrders[filledOrder.OrderID]; ok { + // use base asset quantity here + baseProfit := filledOrder.Quantity.Sub(sellOrder.Quantity) + s.state.AccumulativeArbitrageProfit = s.state.AccumulativeArbitrageProfit.Add(baseProfit) + bbgo.Notify("%s grid arbitrage profit %v %s, accumulative arbitrage profit %v %s", + s.Symbol, + baseProfit, s.Market.BaseCurrency, + s.state.AccumulativeArbitrageProfit, s.Market.BaseCurrency, + ) + } + } + } else if !s.Long && s.Quantity.Sign() > 0 { + switch filledOrder.Side { + case types.SideTypeSell: + if buyOrder, ok := s.state.ArbitrageOrders[filledOrder.OrderID]; ok { + // use base asset quantity here + quoteProfit := filledOrder.Quantity.Mul(filledOrder.Price).Sub( + buyOrder.Quantity.Mul(buyOrder.Price)) + s.state.AccumulativeArbitrageProfit = s.state.AccumulativeArbitrageProfit.Add(quoteProfit) + bbgo.Notify("%s grid arbitrage profit %v %s, accumulative arbitrage profit %v %s", + s.Symbol, + quoteProfit, s.Market.QuoteCurrency, + s.state.AccumulativeArbitrageProfit, s.Market.QuoteCurrency, + ) + } + case types.SideTypeBuy: + if sellOrder, ok := s.state.ArbitrageOrders[filledOrder.OrderID]; ok { + // use base asset quantity here + quoteProfit := sellOrder.Quantity.Mul(sellOrder.Price). + Sub(filledOrder.Quantity.Mul(filledOrder.Price)) + s.state.AccumulativeArbitrageProfit = s.state.AccumulativeArbitrageProfit.Add(quoteProfit) + bbgo.Notify("%s grid arbitrage profit %v %s, accumulative arbitrage profit %v %s", s.Symbol, + quoteProfit, s.Market.QuoteCurrency, + s.state.AccumulativeArbitrageProfit, s.Market.QuoteCurrency, + ) + } + } + } } func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"}) } +func (s *Strategy) LoadState() error { + instanceID := s.InstanceID() + + var state State + if s.Persistence != nil { + if err := s.Persistence.Load(&state, ID, instanceID); err != nil { + if err != service.ErrPersistenceNotExists { + return errors.Wrapf(err, "state load error") + } + + s.state = &State{ + FilledBuyGrids: make(map[fixedpoint.Value]struct{}), + FilledSellGrids: make(map[fixedpoint.Value]struct{}), + ArbitrageOrders: make(map[uint64]types.Order), + Position: types.NewPositionFromMarket(s.Market), + } + } else { + s.state = &state + } + } + + // init profit stats + s.state.ProfitStats.Init(s.Market) + + // field guards + if s.state.ArbitrageOrders == nil { + s.state.ArbitrageOrders = make(map[uint64]types.Order) + } + if s.state.FilledBuyGrids == nil { + s.state.FilledBuyGrids = make(map[fixedpoint.Value]struct{}) + } + if s.state.FilledSellGrids == nil { + s.state.FilledSellGrids = make(map[fixedpoint.Value]struct{}) + } + + return nil +} + +func (s *Strategy) SaveState() error { + if s.Persistence != nil { + log.Infof("backing up grid state...") + + instanceID := s.InstanceID() + submitOrders := s.activeOrders.Backup() + s.state.Orders = submitOrders + + if err := s.Persistence.Save(s.state, ID, instanceID); err != nil { + return err + } + } + return nil +} + +// InstanceID returns the instance identifier from the current grid configuration parameters +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s-%s-%d-%d-%d", ID, s.Symbol, s.GridNum, s.UpperPrice.Int(), 
s.LowerPrice.Int()) +} + func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + // do some basic validation if s.GridNum == 0 { s.GridNum = 10 } - if s.UpperPrice <= s.LowerPrice { - return fmt.Errorf("upper price (%f) should not be less than lower price (%f)", s.UpperPrice.Float64(), s.LowerPrice.Float64()) + if s.Side == "" { + s.Side = types.SideTypeBoth + } + + instanceID := s.InstanceID() + s.groupID = util.FNV32(instanceID) + log.Infof("using group id %d from fnv(%s)", s.groupID, instanceID) + + if err := s.LoadState(); err != nil { + return err } + bbgo.Notify("grid %s position", s.Symbol, s.state.Position) + s.orderStore = bbgo.NewOrderStore(s.Symbol) - s.orderStore.BindStream(session.Stream) + s.orderStore.BindStream(session.UserDataStream) // we don't persist orders so that we can not clear the previous orders for now. just need time to support this. - s.activeOrders = bbgo.NewLocalActiveOrderBook() - s.activeOrders.OnFilled(s.submitReverseOrder) - s.activeOrders.BindStream(session.Stream) + s.activeOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeOrders.OnFilled(s.handleFilledOrder) + s.activeOrders.BindStream(session.UserDataStream) + + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.state.Position, s.orderStore) + + s.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + bbgo.Notify(trade) + s.state.ProfitStats.AddTrade(trade) + }) + + /* + if s.TradeService != nil { + s.tradeCollector.OnTrade(func(trade types.Trade) { + if err := s.TradeService.Mark(ctx, trade.ID, ID); err != nil { + log.WithError(err).Error("trade mark error") + } + }) + } + */ + + s.tradeCollector.OnPositionUpdate(func(position *types.Position) { + bbgo.Notify(position) + }) + s.tradeCollector.BindStream(session.UserDataStream) s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { defer wg.Done() - log.Infof("canceling active orders...") + if err := s.SaveState(); err != nil { + log.WithError(err).Errorf("can not save state: %+v", s.state) + } else { + bbgo.Notify("%s: %s grid is saved", ID, s.Symbol) + } - if err := session.Exchange.CancelOrders(ctx, s.activeOrders.Orders()...); err != nil { + // now we can cancel the open orders + log.Infof("canceling active orders...") + if err := session.Exchange.CancelOrders(context.Background(), s.activeOrders.Orders()...); err != nil { log.WithError(err).Errorf("cancel order error") } }) - session.Stream.OnTradeUpdate(s.tradeUpdateHandler) - session.Stream.OnConnect(func() { - s.placeGridOrders(orderExecutor, session) + session.UserDataStream.OnStart(func() { + // if we have orders in the state data, we can restore them + if len(s.state.Orders) > 0 { + bbgo.Notify("restoring %s %d grid orders...", s.Symbol, len(s.state.Orders)) + + createdOrders, err := orderExecutor.SubmitOrders(ctx, s.state.Orders...) + if err != nil { + log.WithError(err).Error("active orders restore error") + } + s.activeOrders.Add(createdOrders...) + s.orderStore.Add(createdOrders...) 
+ } else { + // or place new orders + s.placeGridOrders(orderExecutor, session) + } }) + if s.CatchUp { + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + log.Infof("catchUp mode is enabled, updating grid orders...") + // update grid + s.placeGridOrders(orderExecutor, session) + }) + } + return nil } diff --git a/pkg/strategy/infinity-grid/strategy.go b/pkg/strategy/infinity-grid/strategy.go new file mode 100644 index 0000000000..ac4ee781fb --- /dev/null +++ b/pkg/strategy/infinity-grid/strategy.go @@ -0,0 +1,529 @@ +package infinity_grid + +import ( + "context" + "fmt" + "math" + "sync" + + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" + "github.com/sirupsen/logrus" +) + +const ID = "infinity-grid" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +// State is the grid snapshot +type State struct { + Orders []types.SubmitOrder `json:"orders,omitempty"` + FilledBuyGrids map[fixedpoint.Value]struct{} `json:"filledBuyGrids"` + FilledSellGrids map[fixedpoint.Value]struct{} `json:"filledSellGrids"` + Position *types.Position `json:"position,omitempty"` + + ProfitStats types.ProfitStats `json:"profitStats,omitempty"` +} + +type Strategy struct { + // The notification system will be injected into the strategy automatically. + // This field will be injected automatically since it's a single exchange strategy. + *bbgo.Notifiability `json:"-" yaml:"-"` + + *bbgo.Graceful `json:"-" yaml:"-"` + + *bbgo.Persistence + + // OrderExecutor is an interface for submitting order. + // This field will be injected automatically since it's a single exchange strategy. + bbgo.OrderExecutor `json:"-" yaml:"-"` + + // Market stores the configuration of the market, for example, VolumePrecision, PricePrecision, MinLotSize... etc + // This field will be injected automatically since we defined the Symbol field. + types.Market `json:"-" yaml:"-"` + + // These fields will be filled from the config file (it translates YAML to JSON) + Symbol string `json:"symbol" yaml:"symbol"` + + LowerPrice fixedpoint.Value `json:"lowerPrice" yaml:"lowerPrice"` + + // Buy-Sell Margin for each pair of orders + Margin fixedpoint.Value `json:"margin"` + + // Quantity is the quantity you want to submit for each order. + Quantity fixedpoint.Value `json:"quantity"` + + InitialOrderQuantity fixedpoint.Value `json:"initialOrderQuantity"` + CountOfMoreOrders int `json:"countOfMoreOrders"` + + // GridNum is the grid number, how many orders you want to post on the orderbook. + GridNum int64 `json:"gridNumber" yaml:"gridNumber"` + + // Side is the initial maker orders side. defaults to "both" + Side types.SideType `json:"side" yaml:"side"` + + // Long means you want to hold more base asset than the quote asset. + Long bool `json:"long,omitempty" yaml:"long,omitempty"` + + state *State + + // orderStore is used to store all the created orders, so that we can filter the trades. + orderStore *bbgo.OrderStore + + // activeOrders is the locally maintained active order book of the maker orders. 
+ activeOrders *bbgo.ActiveOrderBook + + tradeCollector *bbgo.TradeCollector + + currentUpperGrid int + currentLowerGrid int + + // groupID is the group ID used for the strategy instance for canceling orders + groupID uint32 +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Validate() error { + if s.LowerPrice.IsZero() { + return errors.New("lowerPrice can not be zero, you forgot to set?") + } + + if s.Margin.Sign() <= 0 { + // If margin is empty or its value is negative + return fmt.Errorf("Margin should bigger than 0") + } + + if s.Quantity.IsZero() { + return fmt.Errorf("Quantity can not be zero") + } + + return nil +} + +func (s *Strategy) placeInfiniteGridOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { + balances := session.Account.Balances() + log.Infof("Balances: %s", balances.String()) + log.Infof("Base currency: %s", s.Market.BaseCurrency) // BTC + log.Infof("Quote currency: %s", s.Market.QuoteCurrency) // USD + baseBalance, ok := balances[s.Market.BaseCurrency] + if !ok { + log.Errorf("base balance %s not found", s.Market.BaseCurrency) + return + } + if s.currentUpperGrid != 0 || s.currentLowerGrid != 0 { + // reconnect, do not place orders + return + } + + quoteBalance, ok := balances[s.Market.QuoteCurrency] + if !ok || quoteBalance.Available.Compare(fixedpoint.Zero) < 0 { // check available USD in balance + log.Errorf("quote balance %s not found", s.Market.QuoteCurrency) + return + } + + var orders []types.SubmitOrder + var quantityF fixedpoint.Value + currentPrice, ok := session.LastPrice(s.Symbol) + if !ok { + return + } + + quantityF = s.Quantity + if s.InitialOrderQuantity.Compare(fixedpoint.Zero) > 0 { + quantityF = s.InitialOrderQuantity + // Buy half of value of asset + order := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeMarket, + Market: s.Market, + Quantity: quantityF, + Price: currentPrice, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + } + log.Infof("submitting init order: %s", order.String()) + orders = append(orders, order) + + baseBalance.Available = baseBalance.Available.Add(quantityF) + //createdOrders, err := orderExecutor.SubmitOrders(context.Background(), order) + //if err != nil { + //log.WithError(err).Errorf("can not place init order") + //return + //} + + //s.activeOrders.Add(createdOrders...) + //s.orderStore.Add(createdOrders...) 
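	// The two loops below build a geometric grid around the current price: sell levels at
	// currentPrice*(1+margin)^i and buy levels at currentPrice*(1-margin)^i. For example,
	// with margin=0.01 and a current price of 100, the first sell levels are 101, 102.01,
	// 103.03... and the first buy levels are 99, 98.01, 97.03..., with the buy side stopping
	// once a level would fall below lowerPrice.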
+ } + + // Sell Side + j := 1 + for i := int64(1); i <= s.GridNum/2; i++ { + price := fixedpoint.NewFromFloat(currentPrice.Float64() * math.Pow((1.0+s.Margin.Float64()), float64(j))) + j++ + if price.Compare(s.LowerPrice) < 0 { + i-- + continue + } + + quantity := s.Quantity + //quoteQuantity := price.Mul(quantity) + if baseBalance.Available.Compare(quantity) < 0 { + log.Errorf("base balance %s %s is not enough, stop generating sell orders", + baseBalance.Currency, + baseBalance.Available.String()) + break + } + if _, filled := s.state.FilledSellGrids[price]; filled { + log.Debugf("sell grid at price %v is already filled, skipping", price) + continue + } + order := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeLimit, + Market: s.Market, + Quantity: quantity, + Price: price, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + } + log.Infof("%d) submitting order: %s", i, order.String()) + orders = append(orders, order) + baseBalance.Available = baseBalance.Available.Sub(quantity) + + s.state.FilledSellGrids[price] = struct{}{} + s.currentUpperGrid++ + } + + // Buy Side + for i := int64(1); i <= s.GridNum/2; i++ { + price := fixedpoint.NewFromFloat(currentPrice.Float64() * math.Pow((1.0-s.Margin.Float64()), float64(i))) + + if price.Compare(s.LowerPrice) < 0 { + break + } + + quantity := s.Quantity + quoteQuantity := price.Mul(quantity) + if quoteBalance.Available.Compare(quoteQuantity) < 0 { + log.Errorf("quote balance %s %v is not enough for %v, stop generating buy orders", + quoteBalance.Currency, + quoteBalance.Available, + quoteQuantity) + break + } + if _, filled := s.state.FilledBuyGrids[price]; filled { + log.Debugf("buy grid at price %v is already filled, skipping", price) + continue + } + order := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeLimit, + Market: s.Market, + Quantity: quantity, + Price: price, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + } + log.Infof("%d) submitting order: %s", i, order.String()) + orders = append(orders, order) + + quoteBalance.Available = quoteBalance.Available.Sub(quoteQuantity) + + s.state.FilledBuyGrids[price] = struct{}{} + s.currentLowerGrid++ + } + + createdOrders, err := orderExecutor.SubmitOrders(context.Background(), orders...) + if err != nil { + log.WithError(err).Errorf("can not place orders") + return + } + + s.activeOrders.Add(createdOrders...) + s.orderStore.Add(createdOrders...) 
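	// After the initial grid is placed, submitFollowingOrder below keeps it alive: each fill is
	// answered with a reversed order one margin step away (a filled 100 buy with margin=0.01
	// produces a 101 sell), and when a fill leaves only earlyPlacedCount levels on that side,
	// countOfMoreOrders extra levels are appended there while far-away levels on the opposite
	// side beyond gridNumber steps are cancelled.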
+} + +func (s *Strategy) submitFollowingOrder(order types.Order) { + var side = order.Side.Reverse() + var orders []types.SubmitOrder + var cancelOrders []types.Order + var price fixedpoint.Value + var quantity = order.Quantity + const earlyPlacedCount = 2 + + if order.Quantity.Eq(s.InitialOrderQuantity) { + return + } + + switch side { + case types.SideTypeSell: + price = order.Price.Mul(fixedpoint.NewFromFloat(1.0).Add(s.Margin)) + s.currentUpperGrid++ + s.currentLowerGrid-- + if s.Long { + quantity = s.Quantity + } + + case types.SideTypeBuy: + price = order.Price.Mul(fixedpoint.NewFromFloat(1.0).Sub(s.Margin)) + if price.Compare(s.LowerPrice) < 0 { + return + } + if s.Long { + var amount = order.Price.Mul(order.Quantity) + quantity = amount.Div(price) + } + s.currentUpperGrid-- + s.currentLowerGrid++ + } + + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: side, + Type: types.OrderTypeLimit, + Market: s.Market, + Quantity: quantity, + Price: price, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + } + + if price.Compare(s.LowerPrice) >= 0 { + log.Infof("→submitting following order: %s, currentUpperGrid: %d, currentLowerGrid: %d", submitOrder.String(), s.currentUpperGrid, s.currentLowerGrid) + orders = append(orders, submitOrder) + } + + if order.Side == types.SideTypeSell && s.currentUpperGrid <= earlyPlacedCount { + // Plase a more higher order + for i := 1; i <= s.CountOfMoreOrders; i++ { + price = order.Price.MulPow(fixedpoint.NewFromFloat(1.0).Add(s.Margin), fixedpoint.NewFromInt(int64(i+earlyPlacedCount))) + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: order.Side, + Market: s.Market, + Type: types.OrderTypeLimit, + Quantity: s.Quantity, + Price: price, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + } + + orders = append(orders, submitOrder) + s.currentUpperGrid++ + log.Infof("submitting new higher order: %s, currentUpperGrid: %d", submitOrder.String(), s.currentUpperGrid) + } + // Cleanup overabundant order limits + lowerGridPrice := order.Price.MulPow(fixedpoint.NewFromFloat(1.0).Sub(s.Margin), fixedpoint.NewFromInt(int64(s.GridNum))) + for _, cancelOrder := range s.activeOrders.Orders() { + if cancelOrder.Side == types.SideTypeSell { + continue + } + if cancelOrder.Price.Compare(lowerGridPrice) < 0 { + cancelOrders = append(cancelOrders, cancelOrder) + } + } + log.Infof("cleanup %d the lowest orders", len(cancelOrders)) + s.currentLowerGrid -= len(cancelOrders) + s.OrderExecutor.CancelOrders(context.Background(), cancelOrders...) 
+ } + + if order.Side == types.SideTypeBuy && s.currentLowerGrid <= earlyPlacedCount { + // Plase a more lower order + for i := 1; i <= s.CountOfMoreOrders; i++ { + price = order.Price.MulPow(fixedpoint.NewFromFloat(1.0).Sub(s.Margin), fixedpoint.NewFromInt(int64(i+earlyPlacedCount))) + + if price.Compare(s.LowerPrice) < 0 { + break + } + + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: order.Side, + Market: s.Market, + Type: types.OrderTypeLimit, + Quantity: s.Quantity, + Price: price, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + } + + orders = append(orders, submitOrder) + s.currentLowerGrid++ + log.Infof("submitting new lower order: %s, currentLowerGrid: %d", submitOrder.String(), s.currentLowerGrid) + } + // Cleanup overabundant order limits + upperGridPrice := order.Price.MulPow(fixedpoint.NewFromFloat(1.0).Add(s.Margin), fixedpoint.NewFromInt(int64(s.GridNum))) + for _, cancelOrder := range s.activeOrders.Orders() { + if cancelOrder.Side == types.SideTypeBuy { + continue + } + if cancelOrder.Price.Compare(upperGridPrice) > 0 { + cancelOrders = append(cancelOrders, cancelOrder) + } + } + log.Infof("cleanup %d the highest orders", len(cancelOrders)) + s.currentUpperGrid -= len(cancelOrders) + s.OrderExecutor.CancelOrders(context.Background(), cancelOrders...) + } + + createdOrders, err := s.OrderExecutor.SubmitOrders(context.Background(), orders...) + if err != nil { + log.WithError(err).Errorf("can not place orders") + return + } + + s.activeOrders.Add(createdOrders...) +} + +func (s *Strategy) handleFilledOrder(order types.Order) { + if order.Symbol != s.Symbol { + return + } + + //s.Notifiability.Notify("order filled: %s", order.String()) + s.submitFollowingOrder(order) +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"}) +} + +func (s *Strategy) LoadState() error { + instanceID := s.InstanceID() + + var state State + if s.Persistence != nil { + if err := s.Persistence.Load(&state, ID, instanceID); err != nil { + if err != service.ErrPersistenceNotExists { + return errors.Wrapf(err, "state load error") + } + + s.state = &State{ + FilledBuyGrids: make(map[fixedpoint.Value]struct{}), + FilledSellGrids: make(map[fixedpoint.Value]struct{}), + Position: types.NewPositionFromMarket(s.Market), + } + } else { + s.state = &state + } + } + + // init profit stats + s.state.ProfitStats.Init(s.Market) + + // field guards + if s.state.FilledBuyGrids == nil { + s.state.FilledBuyGrids = make(map[fixedpoint.Value]struct{}) + } + if s.state.FilledSellGrids == nil { + s.state.FilledSellGrids = make(map[fixedpoint.Value]struct{}) + } + + return nil +} + +func (s *Strategy) SaveState() error { + if s.Persistence != nil { + log.Infof("backing up grid state...") + + instanceID := s.InstanceID() + s.state.Orders = s.activeOrders.Backup() + + if err := s.Persistence.Save(s.state, ID, instanceID); err != nil { + return err + } + } + return nil +} + +// InstanceID returns the instance identifier from the current grid configuration parameters +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s-%s-%d-%d", ID, s.Symbol, s.GridNum, s.LowerPrice.Int()) +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + if s.GridNum == 0 { + s.GridNum = 10 + } + + instanceID := s.InstanceID() + s.groupID = util.FNV32(instanceID) + log.Infof("using group id %d from fnv(%s)", s.groupID, instanceID) + + if err := 
s.LoadState(); err != nil { + return err + } + + s.Notify("grid %s position", s.Symbol, s.state.Position) + + s.orderStore = bbgo.NewOrderStore(s.Symbol) + s.orderStore.BindStream(session.UserDataStream) + + s.activeOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeOrders.OnFilled(s.handleFilledOrder) + s.activeOrders.BindStream(session.UserDataStream) + + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.state.Position, s.orderStore) + + s.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + s.Notifiability.Notify(trade) + }) + + s.tradeCollector.OnPositionUpdate(func(position *types.Position) { + s.Notifiability.Notify(position) + }) + s.tradeCollector.BindStream(session.UserDataStream) + + s.currentLowerGrid = 0 + s.currentUpperGrid = 0 + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + + if err := s.SaveState(); err != nil { + log.WithError(err).Errorf("can not save state: %+v", s.state) + } else { + s.Notify("%s: %s grid is saved", ID, s.Symbol) + } + + // now we can cancel the open orders + log.Infof("canceling %d active orders...", s.activeOrders.NumOfOrders()) + if err := session.Exchange.CancelOrders(ctx, s.activeOrders.Orders()...); err != nil { + log.WithError(err).Errorf("cancel order error") + } + + //log.Infoln(s.state.ProfitStats.PlainText()) + }) + session.MarketDataStream.OnConnect(func() {}) + session.UserDataStream.OnStart(func() { + if len(s.state.Orders) > 0 { + s.Notifiability.Notify("restoring %s %d grid orders...", s.Symbol, len(s.state.Orders)) + + createdOrders, err := orderExecutor.SubmitOrders(ctx, s.state.Orders...) + if err != nil { + log.WithError(err).Error("active orders restore error") + } + s.activeOrders.Add(createdOrders...) + s.orderStore.Add(createdOrders...) 
+ } else { + s.placeInfiniteGridOrders(orderExecutor, session) + } + }) + + return nil +} diff --git a/pkg/strategy/kline/strategy.go b/pkg/strategy/kline/strategy.go new file mode 100644 index 0000000000..2c800efaa2 --- /dev/null +++ b/pkg/strategy/kline/strategy.go @@ -0,0 +1,43 @@ +package kline + +import ( + "context" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "kline" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + Symbol string `json:"symbol"` + MovingAverage types.IntervalWindow `json:"movingAverage"` +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.MovingAverage.Interval}) +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // skip k-lines from other symbols + if kline.Symbol != s.Symbol { + return + } + + log.Infof("%s", kline.String()) + }) + return nil +} diff --git a/pkg/strategy/marketcap/strategy.go b/pkg/strategy/marketcap/strategy.go new file mode 100644 index 0000000000..bf809e98be --- /dev/null +++ b/pkg/strategy/marketcap/strategy.go @@ -0,0 +1,244 @@ +package marketcap + +import ( + "context" + "fmt" + "os" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/datasource/glassnode" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "marketcap" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + Notifiability *bbgo.Notifiability + glassnode *glassnode.DataSource + + Interval types.Interval `json:"interval"` + BaseCurrency string `json:"baseCurrency"` + BaseWeight fixedpoint.Value `json:"baseWeight"` + TargetCurrencies []string `json:"targetCurrencies"` + Threshold fixedpoint.Value `json:"threshold"` + Verbose bool `json:"verbose"` + DryRun bool `json:"dryRun"` + // max amount to buy or sell per order + MaxAmount fixedpoint.Value `json:"maxAmount"` + + activeOrderBook *bbgo.ActiveOrderBook +} + +func (s *Strategy) Initialize() error { + apiKey := os.Getenv("GLASSNODE_API_KEY") + s.glassnode = glassnode.New(apiKey) + return nil +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Validate() error { + if len(s.TargetCurrencies) == 0 { + return fmt.Errorf("taretCurrencies should not be empty") + } + + for _, c := range s.TargetCurrencies { + if c == s.BaseCurrency { + return fmt.Errorf("targetCurrencies contain baseCurrency") + } + } + + if s.Threshold.Sign() < 0 { + return fmt.Errorf("threshold should not less than 0") + } + + if s.MaxAmount.Sign() < 0 { + return fmt.Errorf("maxAmount shoud not less than 0") + } + + return nil +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + for _, symbol := range s.symbols() { + session.Subscribe(types.KLineChannel, symbol, types.SubscribeOptions{Interval: s.Interval}) + } +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + s.activeOrderBook = bbgo.NewActiveOrderBook("") + s.activeOrderBook.BindStream(session.UserDataStream) + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + s.rebalance(ctx, 
orderExecutor, session) + }) + return nil +} + +func (s *Strategy) rebalance(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { + if err := orderExecutor.CancelOrders(ctx, s.activeOrderBook.Orders()...); err != nil { + log.WithError(err).Error("failed to cancel orders") + } + + submitOrders := s.generateSubmitOrders(ctx, session) + for _, submitOrder := range submitOrders { + log.Infof("generated submit order: %s", submitOrder.String()) + } + + if s.DryRun { + return + } + + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrders...) + if err != nil { + log.WithError(err).Error("failed to submit orders") + return + } + + s.activeOrderBook.Add(createdOrders...) +} + +func (s *Strategy) generateSubmitOrders(ctx context.Context, session *bbgo.ExchangeSession) (submitOrders []types.SubmitOrder) { + targetWeights := s.getTargetWeights(ctx) + prices := s.prices(ctx, session) + marketValues := prices.Mul(s.quantities(session)) + currentWeights := marketValues.Normalize() + + for currency, targetWeight := range targetWeights { + symbol := currency + s.BaseCurrency + currentWeight := currentWeights[currency] + currentPrice := prices[currency] + + log.Infof("%s price: %v, current weight: %v, target weight: %v", + symbol, + currentPrice, + currentWeight, + targetWeight) + + // calculate the difference between current weight and target weight + // if the difference is less than threshold, then we will not create the order + weightDifference := targetWeight.Sub(currentWeight) + if weightDifference.Abs().Compare(s.Threshold) < 0 { + log.Infof("%s weight distance |%v - %v| = |%v| less than the threshold: %v", + symbol, + currentWeight, + targetWeight, + weightDifference, + s.Threshold) + continue + } + + quantity := weightDifference.Mul(marketValues.Sum()).Div(currentPrice) + + side := types.SideTypeBuy + if quantity.Sign() < 0 { + side = types.SideTypeSell + quantity = quantity.Abs() + } + + if s.MaxAmount.Sign() > 0 { + quantity = bbgo.AdjustQuantityByMaxAmount(quantity, currentPrice, s.MaxAmount) + log.Infof("adjust the quantity %v (%s %s @ %v) by max amount %v", + quantity, + symbol, + side.String(), + currentPrice, + s.MaxAmount) + } + + order := types.SubmitOrder{ + Symbol: symbol, + Side: side, + Type: types.OrderTypeLimit, + Quantity: quantity, + Price: currentPrice, + } + + submitOrders = append(submitOrders, order) + } + return submitOrders +} + +func (s *Strategy) getTargetWeights(ctx context.Context) types.ValueMap { + m := types.FloatMap{} + + // get market cap values + for _, currency := range s.TargetCurrencies { + marketCap, err := s.glassnode.QueryMarketCapInUSD(ctx, currency) + if err != nil { + log.WithError(err).Error("failed to query market cap") + return nil + } + m[currency] = marketCap + } + + // normalize + m = m.Normalize() + + // rescale by 1 - baseWeight + m = m.MulScalar(1.0 - s.BaseWeight.Float64()) + + // append base weight + m[s.BaseCurrency] = s.BaseWeight.Float64() + + // convert to types.ValueMap + targetWeights := types.ValueMap{} + for currency, weight := range m { + targetWeights[currency] = fixedpoint.NewFromFloat(weight) + } + + return targetWeights +} + +func (s *Strategy) prices(ctx context.Context, session *bbgo.ExchangeSession) types.ValueMap { + tickers, err := session.Exchange.QueryTickers(ctx, s.symbols()...) 
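// Worked example (hypothetical function and numbers, plain float64 instead of
// fixedpoint) of the weight math used by getTargetWeights and
// generateSubmitOrders above: market caps are normalized, rescaled by
// (1 - baseWeight), the base currency keeps the remaining weight, and an order
// is only generated when |target - current| exceeds the threshold; its
// quantity is that weight difference converted back into base units.
func exampleMarketCapRebalance() {
	caps := map[string]float64{"BTC": 600, "ETH": 300, "BNB": 100} // arbitrary units
	const baseWeight = 0.30

	total := 0.0
	for _, c := range caps {
		total += c
	}
	target := map[string]float64{"USDT": baseWeight}
	for currency, c := range caps {
		target[currency] = c / total * (1 - baseWeight) // BTC 0.42, ETH 0.21, BNB 0.07
	}

	// Suppose the portfolio is worth 10,000 USDT, BTC trades at 25,000 and the
	// portfolio currently holds 50% BTC while the target is 42%. The difference
	// -0.08 is well above a 0.02 threshold, so a sell order is generated.
	portfolioValue, btcPrice := 10000.0, 25000.0
	diff := target["BTC"] - 0.50                 // -0.08
	quantity := diff * portfolioValue / btcPrice // -0.032, i.e. sell 0.032 BTC
	fmt.Printf("BTC weight diff %.2f -> sell %.4f BTC\n", diff, -quantity)
}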
+ if err != nil { + log.WithError(err).Error("failed to query tickers") + return nil + } + + prices := types.ValueMap{} + for _, currency := range s.TargetCurrencies { + prices[currency] = tickers[currency+s.BaseCurrency].Last + } + + // append base currency price + prices[s.BaseCurrency] = fixedpoint.One + + return prices +} + +func (s *Strategy) quantities(session *bbgo.ExchangeSession) types.ValueMap { + balances := session.Account.Balances() + + quantities := types.ValueMap{} + for _, currency := range s.currencies() { + quantities[currency] = balances[currency].Total() + } + + return quantities +} + +func (s *Strategy) symbols() (symbols []string) { + for _, currency := range s.TargetCurrencies { + symbols = append(symbols, currency+s.BaseCurrency) + } + return symbols +} + +func (s *Strategy) currencies() (currencies []string) { + currencies = append(currencies, s.TargetCurrencies...) + currencies = append(currencies, s.BaseCurrency) + return currencies +} diff --git a/pkg/strategy/mirrormaker/main.go b/pkg/strategy/mirrormaker/main.go deleted file mode 100644 index bb0ea0a5e8..0000000000 --- a/pkg/strategy/mirrormaker/main.go +++ /dev/null @@ -1,304 +0,0 @@ -package mirrormaker - -import ( - "context" - "fmt" - "sync" - "time" - - "github.com/sirupsen/logrus" - - "github.com/c9s/bbgo/pkg/bbgo" - "github.com/c9s/bbgo/pkg/fixedpoint" - "github.com/c9s/bbgo/pkg/types" -) - -var defaultMargin = fixedpoint.NewFromFloat(0.01) - -var defaultQuantity = fixedpoint.NewFromFloat(0.001) - -var log = logrus.WithField("strategy", "mirrormaker") - -func init() { - bbgo.RegisterStrategy("mirrormaker", &Strategy{}) -} - -type Strategy struct { - *bbgo.Graceful - *bbgo.Persistence - - Symbol string `json:"symbol"` - SourceExchange string `json:"sourceExchange"` - MakerExchange string `json:"makerExchange"` - - UpdateInterval time.Duration `json:"updateInterval"` - Margin fixedpoint.Value `json:"margin"` - BidMargin fixedpoint.Value `json:"bidMargin"` - AskMargin fixedpoint.Value `json:"askMargin"` - Quantity fixedpoint.Value `json:"quantity"` - QuantityMultiplier fixedpoint.Value `json:"quantityMultiplier"` - - NumLayers int `json:"numLayers"` - Pips int `json:"pips"` - - makerSession *bbgo.ExchangeSession - sourceSession *bbgo.ExchangeSession - - sourceMarket types.Market - makerMarket types.Market - - book *types.StreamOrderBook - activeMakerOrders *bbgo.LocalActiveOrderBook - - orderStore *bbgo.OrderStore - - Position fixedpoint.Value - lastPrice float64 - - stopC chan struct{} -} - -func (s *Strategy) CrossSubscribe(sessions map[string]*bbgo.ExchangeSession) { - sourceSession, ok := sessions[s.SourceExchange] - if !ok { - panic(fmt.Errorf("source exchange %s is not defined", s.SourceExchange)) - } - - log.Infof("subscribing %s from %s", s.Symbol, s.SourceExchange) - sourceSession.Subscribe(types.BookChannel, s.Symbol, types.SubscribeOptions{}) -} - -func (s *Strategy) updateQuote(ctx context.Context) { - if err := s.makerSession.Exchange.CancelOrders(ctx, s.activeMakerOrders.Orders()...); err != nil { - log.WithError(err).Errorf("can not cancel orders") - return - } - - // avoid unlock issue - time.Sleep(100 * time.Millisecond) - - sourceBook := s.book.Get() - if len(sourceBook.Bids) == 0 || len(sourceBook.Asks) == 0 { - return - } - - bestBidPrice := sourceBook.Bids[0].Price - bestAskPrice := sourceBook.Asks[0].Price - log.Infof("best bid price %f, best ask price: %f", bestBidPrice.Float64(), bestAskPrice.Float64()) - - bidQuantity := s.Quantity - bidPrice := bestBidPrice.MulFloat64(1.0 - 
s.BidMargin.Float64()) - - askQuantity := s.Quantity - askPrice := bestAskPrice.MulFloat64(1.0 + s.AskMargin.Float64()) - - log.Infof("quote bid price: %f ask price: %f", bidPrice.Float64(), askPrice.Float64()) - - var submitOrders []types.SubmitOrder - - balances := s.makerSession.Account.Balances() - makerQuota := &bbgo.QuotaTransaction{} - if b, ok := balances[s.makerMarket.BaseCurrency]; ok { - makerQuota.BaseAsset.Add(b.Available) - } - if b, ok := balances[s.makerMarket.QuoteCurrency]; ok { - makerQuota.QuoteAsset.Add(b.Available) - } - - hedgeBalances := s.sourceSession.Account.Balances() - hedgeQuota := &bbgo.QuotaTransaction{} - if b, ok := hedgeBalances[s.sourceMarket.BaseCurrency]; ok { - hedgeQuota.BaseAsset.Add(b.Available) - } - if b, ok := hedgeBalances[s.sourceMarket.QuoteCurrency]; ok { - hedgeQuota.QuoteAsset.Add(b.Available) - } - - log.Infof("maker quota: %+v", makerQuota) - log.Infof("hedge quota: %+v", hedgeQuota) - - for i := 0; i < s.NumLayers; i++ { - // bid orders - if makerQuota.QuoteAsset.Lock(bidQuantity.Mul(bidPrice)) && hedgeQuota.BaseAsset.Lock(bidQuantity) { - // if we bought, then we need to sell the base from the hedge session - submitOrders = append(submitOrders, types.SubmitOrder{ - Symbol: s.Symbol, - Type: types.OrderTypeLimit, - Side: types.SideTypeBuy, - Price: bidPrice.Float64(), - Quantity: bidQuantity.Float64(), - TimeInForce: "GTC", - }) - - makerQuota.Commit() - hedgeQuota.Commit() - } else { - makerQuota.Rollback() - hedgeQuota.Rollback() - } - - // ask orders - if makerQuota.BaseAsset.Lock(askQuantity) && hedgeQuota.QuoteAsset.Lock(askQuantity.Mul(askPrice)) { - // if we bought, then we need to sell the base from the hedge session - submitOrders = append(submitOrders, types.SubmitOrder{ - Symbol: s.Symbol, - Type: types.OrderTypeLimit, - Side: types.SideTypeSell, - Price: askPrice.Float64(), - Quantity: askQuantity.Float64(), - TimeInForce: "GTC", - }) - makerQuota.Commit() - hedgeQuota.Commit() - } else { - makerQuota.Rollback() - hedgeQuota.Rollback() - } - - bidPrice -= fixedpoint.NewFromFloat(s.makerMarket.TickSize * float64(s.Pips)) - askPrice += fixedpoint.NewFromFloat(s.makerMarket.TickSize * float64(s.Pips)) - - askQuantity = askQuantity.Mul(s.QuantityMultiplier) - bidQuantity = bidQuantity.Mul(s.QuantityMultiplier) - } - - if len(submitOrders) == 0 { - return - } - - makerOrderExecutor := &bbgo.ExchangeOrderExecutor{Session: s.makerSession} - makerOrders, err := makerOrderExecutor.SubmitOrders(ctx, submitOrders...) - if err != nil { - log.WithError(err).Errorf("order submit error") - return - } - - s.activeMakerOrders.Add(makerOrders...) - s.orderStore.Add(makerOrders...) 
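// Worked example (hypothetical function and numbers) of the quoting loop in
// updateQuote above: bid/ask start from the source book's best prices shifted
// by the margins, then every extra layer steps the price away by
// pips * tickSize and scales the quantity by quantityMultiplier.
func exampleMirrorLayers() {
	bestBid, bestAsk := 100.00, 100.10
	bidMargin, askMargin := 0.001, 0.001
	tickSize, pips := 0.01, 10
	quantity, multiplier := 0.001, 2.0

	bid := bestBid * (1 - bidMargin) // 99.90
	ask := bestAsk * (1 + askMargin) // 100.2001
	for layer := 0; layer < 3; layer++ {
		fmt.Printf("layer %d: bid %.4f x %.4f, ask %.4f x %.4f\n",
			layer, bid, quantity, ask, quantity)
		bid -= tickSize * float64(pips) // 0.10 further away per layer
		ask += tickSize * float64(pips)
		quantity *= multiplier // 0.001, 0.002, 0.004
	}
}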
-} - -func (s *Strategy) handleTradeUpdate(trade types.Trade) { - log.Infof("received trade %+v", trade) - if s.orderStore.Exists(trade.OrderID) { - log.Infof("identified trade %d with an existing order: %d", trade.ID, trade.OrderID) - - q := fixedpoint.NewFromFloat(trade.Quantity) - if trade.Side == types.SideTypeSell { - q = -q - } - - s.Position.AtomicAdd(q) - - pos := s.Position.AtomicLoad() - log.Warnf("position changed: %f", pos.Float64()) - - s.lastPrice = trade.Price - } -} - -func (s *Strategy) CrossRun(ctx context.Context, _ bbgo.OrderExecutionRouter, sessions map[string]*bbgo.ExchangeSession) error { - if s.UpdateInterval == 0 { - s.UpdateInterval = time.Second - } - - if s.NumLayers == 0 { - s.NumLayers = 1 - } - - if s.BidMargin == 0 { - if s.Margin != 0 { - s.BidMargin = s.Margin - } else { - s.BidMargin = defaultMargin - } - } - - if s.AskMargin == 0 { - if s.Margin != 0 { - s.AskMargin = s.Margin - } else { - s.AskMargin = defaultMargin - } - } - - if s.Quantity == 0 { - s.Quantity = defaultQuantity - } - - sourceSession, ok := sessions[s.SourceExchange] - if !ok { - return fmt.Errorf("source exchange session %s is not defined", s.SourceExchange) - } - - s.sourceSession = sourceSession - - makerSession, ok := sessions[s.MakerExchange] - if !ok { - return fmt.Errorf("maker exchange session %s is not defined", s.MakerExchange) - } - - s.makerSession = makerSession - - s.sourceMarket, ok = s.sourceSession.Market(s.Symbol) - if !ok { - return fmt.Errorf("source session market %s is not defined", s.Symbol) - } - - s.makerMarket, ok = s.makerSession.Market(s.Symbol) - if !ok { - return fmt.Errorf("maker session market %s is not defined", s.Symbol) - } - - s.book = types.NewStreamBook(s.Symbol) - s.book.BindStream(s.sourceSession.Stream) - - s.makerSession.Stream.OnTradeUpdate(s.handleTradeUpdate) - - s.activeMakerOrders = bbgo.NewLocalActiveOrderBook() - s.activeMakerOrders.BindStream(s.makerSession.Stream) - - s.orderStore = bbgo.NewOrderStore(s.Symbol) - s.orderStore.BindStream(s.makerSession.Stream) - - s.stopC = make(chan struct{}) - - if err := s.Persistence.Load(&s.Position, "position"); err != nil { - log.WithError(err).Warnf("can not load position") - } else { - log.Infof("position is loaded successfully, position=%f", s.Position.Float64()) - } - - go func() { - ticker := time.NewTicker(s.UpdateInterval) - defer ticker.Stop() - for { - select { - - case <-s.stopC: - return - - case <-ctx.Done(): - return - - case <-ticker.C: - s.updateQuote(ctx) - } - } - }() - - s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { - defer wg.Done() - - close(s.stopC) - - if err := s.Persistence.Save(&s.Position, "position"); err != nil { - log.WithError(err).Error("persistence save error") - } - - if err := s.makerSession.Exchange.CancelOrders(ctx, s.activeMakerOrders.Orders()...); err != nil { - log.WithError(err).Errorf("can not cancel orders") - } - }) - - return nil -} diff --git a/pkg/strategy/pivotshort/strategy.go b/pkg/strategy/pivotshort/strategy.go new file mode 100644 index 0000000000..c9b28d631a --- /dev/null +++ b/pkg/strategy/pivotshort/strategy.go @@ -0,0 +1,559 @@ +package pivotshort + +import ( + "context" + "fmt" + "os" + "sort" + "sync" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "pivotshort" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + 
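// Worked example (hypothetical function and numbers) for the BreakLow entry
// configured below and evaluated later in Run: a short is opened when the
// close falls below previousLow*(1-ratio), unless the close is already below
// the EMA stop line ema*(1-stopEMARange), in which case the entry is skipped.
func exampleBreakLowEntry() {
	previousLow := 28000.0
	ratio := 0.001
	breakPrice := previousLow * (1 - ratio) // 27972

	closePrice := 27950.0
	ema, stopEMARange := 28500.0, 0.03
	emaStopShort := ema * (1 - stopEMARange) // 27645

	if closePrice < emaStopShort {
		fmt.Println("price already fell too far below the EMA, skip the short entry")
		return
	}
	if closePrice < breakPrice {
		fmt.Println("previous low broken, submit the market or limit sell")
	}
}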
+type IntervalWindowSetting struct { + types.IntervalWindow +} + +// BreakLow -- when price breaks the previous pivot low, we set a trade entry +type BreakLow struct { + Ratio fixedpoint.Value `json:"ratio"` + MarketOrder bool `json:"marketOrder"` + BounceRatio fixedpoint.Value `json:"bounceRatio"` + Quantity fixedpoint.Value `json:"quantity"` + StopEMARange fixedpoint.Value `json:"stopEMARange"` + StopEMA *types.IntervalWindow `json:"stopEMA"` +} + +type BounceShort struct { + Enabled bool `json:"enabled"` + + types.IntervalWindow + + MinDistance fixedpoint.Value `json:"minDistance"` + NumLayers int `json:"numLayers"` + LayerSpread fixedpoint.Value `json:"layerSpread"` + Quantity fixedpoint.Value `json:"quantity"` + Ratio fixedpoint.Value `json:"ratio"` +} + +type Entry struct { + CatBounceRatio fixedpoint.Value `json:"catBounceRatio"` + NumLayers int `json:"numLayers"` + TotalQuantity fixedpoint.Value `json:"totalQuantity"` + + Quantity fixedpoint.Value `json:"quantity"` + MarginSideEffect types.MarginOrderSideEffectType `json:"marginOrderSideEffect"` +} + +type CumulatedVolume struct { + Enabled bool `json:"enabled"` + MinQuoteVolume fixedpoint.Value `json:"minQuoteVolume"` + Window int `json:"window"` +} + +type Exit struct { + RoiStopLossPercentage fixedpoint.Value `json:"roiStopLossPercentage"` + RoiTakeProfitPercentage fixedpoint.Value `json:"roiTakeProfitPercentage"` + RoiMinTakeProfitPercentage fixedpoint.Value `json:"roiMinTakeProfitPercentage"` + + LowerShadowRatio fixedpoint.Value `json:"lowerShadowRatio"` + + CumulatedVolume *CumulatedVolume `json:"cumulatedVolume"` + + MarginSideEffect types.MarginOrderSideEffectType `json:"marginOrderSideEffect"` +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + + Environment *bbgo.Environment + Symbol string `json:"symbol"` + Market types.Market + + // pivot interval and window + types.IntervalWindow + + // persistence fields + Position *types.Position `persistence:"position"` + ProfitStats *types.ProfitStats `persistence:"profit_stats"` + TradeStats *types.TradeStats `persistence:"trade_stats"` + + BreakLow BreakLow `json:"breakLow"` + + BounceShort *BounceShort `json:"bounceShort"` + + Entry Entry `json:"entry"` + Exit Exit `json:"exit"` + + session *bbgo.ExchangeSession + orderExecutor *bbgo.GeneralOrderExecutor + + lastLow fixedpoint.Value + pivot *indicator.Pivot + resistancePivot *indicator.Pivot + stopEWMA *indicator.EWMA + pivotLowPrices []fixedpoint.Value + resistancePrices []float64 + currentBounceShortPrice fixedpoint.Value + + // StrategyController + bbgo.StrategyController +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: types.Interval1m}) + + if s.BounceShort != nil && s.BounceShort.Enabled { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.BounceShort.Interval}) + } +} + +func (s *Strategy) useQuantityOrBaseBalance(quantity fixedpoint.Value) fixedpoint.Value { + if quantity.IsZero() { + if balance, ok := s.session.Account.Balance(s.Market.BaseCurrency); ok { + bbgo.Notify("sell quantity is not set, submitting sell with all base balance: %s", balance.Available.String()) + quantity = balance.Available + } + } + + if quantity.IsZero() { + log.Errorf("quantity is zero, can not submit sell order, please check settings") + } + + 
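// Worked example (hypothetical function and numbers) for the Exit thresholds
// defined above: for a short position the return is
// (averageCost - close) / averageCost, so a rising price gives a negative ROI
// and hits the stop loss, while a falling price can hit the take-profit checks.
func exampleShortROI() {
	averageCost := 100.0
	roiStopLoss := 0.02   // roiStopLossPercentage
	roiTakeProfit := 0.05 // roiTakeProfitPercentage

	for _, closePrice := range []float64{103.0, 94.0} {
		roi := (averageCost - closePrice) / averageCost
		switch {
		case roi < -roiStopLoss:
			fmt.Printf("close %.2f: roi %+.2f%% -> stop loss\n", closePrice, roi*100)
		case roi > roiTakeProfit:
			fmt.Printf("close %.2f: roi %+.2f%% -> take profit\n", closePrice, roi*100)
		default:
			fmt.Printf("close %.2f: roi %+.2f%% -> hold\n", closePrice, roi*100)
		}
	}
}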
return quantity +} + +func (s *Strategy) placeLimitSell(ctx context.Context, price, quantity fixedpoint.Value) { + _, _ = s.orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ + Symbol: s.Symbol, + Price: price, + Side: types.SideTypeSell, + Type: types.OrderTypeLimit, + Quantity: quantity, + MarginSideEffect: types.SideEffectTypeMarginBuy, + }) +} + +func (s *Strategy) placeMarketSell(ctx context.Context, quantity fixedpoint.Value) { + _, _ = s.orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeMarket, + Quantity: quantity, + MarginSideEffect: types.SideEffectTypeMarginBuy, + }) +} + +func (s *Strategy) CurrentPosition() *types.Position { + return s.Position +} + +func (s *Strategy) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error { + // Cancel active orders + _ = s.orderExecutor.GracefulCancel(ctx) + + submitOrder := s.Position.NewMarketCloseOrder(percentage) // types.SubmitOrder{ + if submitOrder == nil { + return nil + } + + if s.session.Margin { + submitOrder.MarginSideEffect = s.Exit.MarginSideEffect + } + + bbgo.Notify("Closing %s position by %f", s.Symbol, percentage.Float64()) + log.Infof("Closing %s position by %f", s.Symbol, percentage.Float64()) + _, err := s.orderExecutor.SubmitOrders(ctx, *submitOrder) + if err != nil { + bbgo.Notify("close %s position error", s.Symbol) + log.WithError(err).Errorf("close %s position error", s.Symbol) + } + return err +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + var instanceID = s.InstanceID() + + if s.Position == nil { + s.Position = types.NewPositionFromMarket(s.Market) + } + + if s.ProfitStats == nil { + s.ProfitStats = types.NewProfitStats(s.Market) + } + + // trade stats + if s.TradeStats == nil { + s.TradeStats = &types.TradeStats{} + } + + // StrategyController + s.Status = types.StrategyStatusRunning + + s.OnSuspend(func() { + // Cancel active orders + _ = s.orderExecutor.GracefulCancel(ctx) + }) + + s.OnEmergencyStop(func() { + // Cancel active orders + _ = s.orderExecutor.GracefulCancel(ctx) + // Close 100% position + _ = s.ClosePosition(ctx, fixedpoint.One) + }) + + // initial required information + s.session = session + s.orderExecutor = bbgo.NewGeneralOrderExecutor(session, s.Symbol, ID, instanceID, s.Position) + s.orderExecutor.BindEnvironment(s.Environment) + s.orderExecutor.BindProfitStats(s.ProfitStats) + s.orderExecutor.BindTradeStats(s.TradeStats) + s.orderExecutor.Bind() + + store, _ := session.MarketDataStore(s.Symbol) + + s.pivot = &indicator.Pivot{IntervalWindow: s.IntervalWindow} + s.pivot.Bind(store) + + if s.BounceShort != nil && s.BounceShort.Enabled { + s.resistancePivot = &indicator.Pivot{IntervalWindow: s.BounceShort.IntervalWindow} + s.resistancePivot.Bind(store) + } + + standardIndicator, _ := session.StandardIndicatorSet(s.Symbol) + if s.BreakLow.StopEMA != nil { + s.stopEWMA = standardIndicator.EWMA(*s.BreakLow.StopEMA) + } + + s.lastLow = fixedpoint.Zero + + session.UserDataStream.OnStart(func() { + lastKLine := s.preloadPivot(s.pivot, store) + + if s.resistancePivot != nil { + s.preloadPivot(s.resistancePivot, store) + } + + if lastKLine == nil { + return + } + + if s.resistancePivot != nil { + lows := s.resistancePivot.Lows + minDistance := s.BounceShort.MinDistance.Float64() + closePrice := lastKLine.Close.Float64() + s.resistancePrices = 
findPossibleResistancePrices(closePrice, minDistance, lows) + log.Infof("last price: %f, possible resistance prices: %+v", closePrice, s.resistancePrices) + + if len(s.resistancePrices) > 0 { + resistancePrice := fixedpoint.NewFromFloat(s.resistancePrices[0]) + if resistancePrice.Compare(s.currentBounceShortPrice) != 0 { + log.Infof("updating resistance price... possible resistance prices: %+v", s.resistancePrices) + + _ = s.orderExecutor.GracefulCancel(ctx) + + s.currentBounceShortPrice = resistancePrice + s.placeBounceSellOrders(ctx, s.currentBounceShortPrice, orderExecutor) + } + } + } + }) + + // Always check whether you can open a short position or not + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // StrategyController + if s.Status != types.StrategyStatusRunning { + return + } + + if kline.Symbol != s.Symbol || kline.Interval != types.Interval1m { + return + } + + isPositionOpened := !s.Position.IsClosed() && !s.Position.IsDust(kline.Close) + + if isPositionOpened && s.Position.IsShort() { + // calculate return rate + // TODO: apply quantity to this formula + roi := s.Position.AverageCost.Sub(kline.Close).Div(s.Position.AverageCost) + if roi.Compare(s.Exit.RoiStopLossPercentage.Neg()) < 0 { + // stop loss + bbgo.Notify("%s ROI StopLoss triggered at price %f: Loss %s", s.Symbol, kline.Close.Float64(), roi.Percentage()) + _ = s.ClosePosition(ctx, fixedpoint.One) + return + } else { + // take profit + if roi.Compare(s.Exit.RoiTakeProfitPercentage) > 0 { // force take profit + bbgo.Notify("%s TakeProfit triggered at price %f: by ROI percentage %s", s.Symbol, kline.Close.Float64(), roi.Percentage(), kline) + _ = s.ClosePosition(ctx, fixedpoint.One) + return + } else if !s.Exit.RoiMinTakeProfitPercentage.IsZero() && roi.Compare(s.Exit.RoiMinTakeProfitPercentage) > 0 { + if !s.Exit.LowerShadowRatio.IsZero() && kline.GetLowerShadowHeight().Div(kline.Close).Compare(s.Exit.LowerShadowRatio) > 0 { + bbgo.Notify("%s TakeProfit triggered at price %f: by shadow ratio %f", + s.Symbol, + kline.Close.Float64(), + kline.GetLowerShadowRatio().Float64(), kline) + _ = s.ClosePosition(ctx, fixedpoint.One) + return + } else if s.Exit.CumulatedVolume != nil && s.Exit.CumulatedVolume.Enabled { + if klines, ok := store.KLinesOfInterval(s.Interval); ok { + var cbv = fixedpoint.Zero + var cqv = fixedpoint.Zero + for i := 0; i < s.Exit.CumulatedVolume.Window; i++ { + last := (*klines)[len(*klines)-1-i] + cqv = cqv.Add(last.QuoteVolume) + cbv = cbv.Add(last.Volume) + } + + if cqv.Compare(s.Exit.CumulatedVolume.MinQuoteVolume) > 0 { + bbgo.Notify("%s TakeProfit triggered at price %f: by cumulated volume (window: %d) %f > %f", + s.Symbol, + kline.Close.Float64(), + s.Exit.CumulatedVolume.Window, + cqv.Float64(), + s.Exit.CumulatedVolume.MinQuoteVolume.Float64()) + _ = s.ClosePosition(ctx, fixedpoint.One) + return + } + } + } + } + } + } + + if len(s.pivotLowPrices) == 0 { + return + } + + previousLow := s.pivotLowPrices[len(s.pivotLowPrices)-1] + + // truncate the pivot low prices + if len(s.pivotLowPrices) > 10 { + s.pivotLowPrices = s.pivotLowPrices[len(s.pivotLowPrices)-10:] + } + + if s.stopEWMA != nil && !s.BreakLow.StopEMARange.IsZero() { + ema := fixedpoint.NewFromFloat(s.stopEWMA.Last()) + if ema.IsZero() { + return + } + + emaStopShortPrice := ema.Mul(fixedpoint.One.Sub(s.BreakLow.StopEMARange)) + if kline.Close.Compare(emaStopShortPrice) < 0 { + return + } + } + + ratio := fixedpoint.One.Sub(s.BreakLow.Ratio) + breakPrice := previousLow.Mul(ratio) + + // if previous low is not 
break, skip + if kline.Close.Compare(breakPrice) >= 0 { + return + } + + if !s.Position.IsClosed() && !s.Position.IsDust(kline.Close) { + // s.Notify("skip opening %s position, which is not closed", s.Symbol, s.Position) + return + } + + _ = s.orderExecutor.GracefulCancel(ctx) + + quantity := s.useQuantityOrBaseBalance(s.BreakLow.Quantity) + if s.BreakLow.MarketOrder { + bbgo.Notify("%s price %f breaks the previous low %f with ratio %f, submitting market sell to open a short position", s.Symbol, kline.Close.Float64(), previousLow.Float64(), s.BreakLow.Ratio.Float64()) + s.placeMarketSell(ctx, quantity) + } else { + sellPrice := kline.Close.Mul(fixedpoint.One.Add(s.BreakLow.BounceRatio)) + s.placeLimitSell(ctx, sellPrice, quantity) + } + }) + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // StrategyController + if s.Status != types.StrategyStatusRunning { + return + } + + if s.BounceShort == nil || !s.BounceShort.Enabled { + return + } + + if kline.Symbol != s.Symbol || kline.Interval != s.BounceShort.Interval { + return + } + + if s.resistancePivot != nil { + closePrice := kline.Close.Float64() + minDistance := s.BounceShort.MinDistance.Float64() + lows := s.resistancePivot.Lows + s.resistancePrices = findPossibleResistancePrices(closePrice, minDistance, lows) + + if len(s.resistancePrices) > 0 { + resistancePrice := fixedpoint.NewFromFloat(s.resistancePrices[0]) + if resistancePrice.Compare(s.currentBounceShortPrice) != 0 { + log.Infof("updating resistance price... possible resistance prices: %+v", s.resistancePrices) + + _ = s.orderExecutor.GracefulCancel(ctx) + + s.currentBounceShortPrice = resistancePrice + s.placeBounceSellOrders(ctx, s.currentBounceShortPrice, orderExecutor) + } + } + } + }) + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // StrategyController + if s.Status != types.StrategyStatusRunning { + return + } + + if kline.Symbol != s.Symbol || kline.Interval != s.Interval { + return + } + + if s.pivot.LastLow() > 0.0 { + lastLow := fixedpoint.NewFromFloat(s.pivot.LastLow()) + if lastLow.Compare(s.lastLow) != 0 { + log.Infof("new pivot low detected: %f %s", s.pivot.LastLow(), kline.EndTime.Time()) + } + + s.lastLow = lastLow + s.pivotLowPrices = append(s.pivotLowPrices, s.lastLow) + } + }) + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + _, _ = fmt.Fprintln(os.Stderr, s.TradeStats.String()) + wg.Done() + }) + + return nil +} + +func (s *Strategy) findHigherPivotLow(price fixedpoint.Value) (fixedpoint.Value, bool) { + for l := len(s.pivotLowPrices) - 1; l > 0; l-- { + if s.pivotLowPrices[l].Compare(price) > 0 { + return s.pivotLowPrices[l], true + } + } + + return price, false +} + +func (s *Strategy) placeBounceSellOrders(ctx context.Context, resistancePrice fixedpoint.Value, orderExecutor bbgo.OrderExecutor) { + futuresMode := s.session.Futures || s.session.IsolatedFutures + totalQuantity := s.BounceShort.Quantity + numLayers := s.BounceShort.NumLayers + if numLayers == 0 { + numLayers = 1 + } + + numLayersF := fixedpoint.NewFromInt(int64(numLayers)) + + layerSpread := s.BounceShort.LayerSpread + quantity := totalQuantity.Div(numLayersF) + + log.Infof("placing bounce short orders: resistance price = %f, layer quantity = %f, num of layers = %d", resistancePrice.Float64(), quantity.Float64(), numLayers) + + for i := 0; i < numLayers; i++ { + balances := s.session.GetAccount().Balances() + quoteBalance := balances[s.Market.QuoteCurrency] + baseBalance := balances[s.Market.BaseCurrency] + + // price = 
(resistance_price * (1.0 - ratio)) * ((1.0 + layerSpread) * i) + price := resistancePrice.Mul(fixedpoint.One.Sub(s.BounceShort.Ratio)) + spread := layerSpread.Mul(fixedpoint.NewFromInt(int64(i))) + price = price.Add(spread) + log.Infof("price = %f", price.Float64()) + + log.Infof("placing bounce short order #%d: price = %f, quantity = %f", i, price.Float64(), quantity.Float64()) + + if futuresMode { + if quantity.Mul(price).Compare(quoteBalance.Available) <= 0 { + s.placeOrder(ctx, price, quantity) + } + } else { + if quantity.Compare(baseBalance.Available) <= 0 { + s.placeOrder(ctx, price, quantity) + } + } + } +} + +func (s *Strategy) placeOrder(ctx context.Context, price fixedpoint.Value, quantity fixedpoint.Value) { + _, _ = s.orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeLimit, + Price: price, + Quantity: quantity, + }) +} + +func (s *Strategy) preloadPivot(pivot *indicator.Pivot, store *bbgo.MarketDataStore) *types.KLine { + klines, ok := store.KLinesOfInterval(pivot.Interval) + if !ok { + return nil + } + + last := (*klines)[len(*klines)-1] + log.Infof("last %s price: %f", s.Symbol, last.Close.Float64()) + log.Debugf("updating pivot indicator: %d klines", len(*klines)) + + for i := pivot.Window; i < len(*klines); i++ { + pivot.Update((*klines)[0 : i+1]) + } + + log.Infof("found %s %v previous lows: %v", s.Symbol, pivot.IntervalWindow, pivot.Lows) + log.Infof("found %s %v previous highs: %v", s.Symbol, pivot.IntervalWindow, pivot.Highs) + return &last +} + +func findPossibleResistancePrices(closePrice float64, minDistance float64, lows []float64) []float64 { + // sort float64 in increasing order + sort.Float64s(lows) + + var resistancePrices []float64 + for _, low := range lows { + if low < closePrice { + continue + } + + last := closePrice + if len(resistancePrices) > 0 { + last = resistancePrices[len(resistancePrices)-1] + } + + if (low / last) < (1.0 + minDistance) { + continue + } + resistancePrices = append(resistancePrices, low) + } + + return resistancePrices +} diff --git a/pkg/strategy/pricealert/strategy.go b/pkg/strategy/pricealert/strategy.go index c715a1db6e..f6b15c6eeb 100644 --- a/pkg/strategy/pricealert/strategy.go +++ b/pkg/strategy/pricealert/strategy.go @@ -2,24 +2,27 @@ package pricealert import ( "context" - "math" "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) +const ID = "pricealert" + func init() { - bbgo.RegisterStrategy("pricealert", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) } type Strategy struct { - // The notification system will be injected into the strategy automatically. 
- bbgo.Notifiability - // These fields will be filled from the config file (it translates YAML to JSON) - Symbol string `json:"symbol"` - Interval string `json:"interval"` - MinChange float64 `json:"minChange"` + Symbol string `json:"symbol"` + Interval types.Interval `json:"interval"` + MinChange fixedpoint.Value `json:"minChange"` +} + +func (s *Strategy) ID() string { + return ID } func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { @@ -27,17 +30,17 @@ func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { } func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { - session.Stream.OnKLine(func(kline types.KLine) { + session.MarketDataStream.OnKLine(func(kline types.KLine) { market, ok := session.Market(kline.Symbol) if !ok { return } - if math.Abs(kline.GetChange()) > s.MinChange { - if channel, ok := s.RouteSymbol(s.Symbol); ok { - s.NotifyTo(channel, "%s hit price %s, change %f", s.Symbol, market.FormatPrice(kline.Close), kline.GetChange()) + if kline.GetChange().Abs().Compare(s.MinChange) > 0 { + if channel, ok := bbgo.Notification.RouteSymbol(s.Symbol); ok { + bbgo.NotifyTo(channel, "%s hit price %s, change %v", s.Symbol, market.FormatPrice(kline.Close), kline.GetChange()) } else { - s.Notify("%s hit price %s, change %f", s.Symbol, market.FormatPrice(kline.Close), kline.GetChange()) + bbgo.Notify("%s hit price %s, change %v", s.Symbol, market.FormatPrice(kline.Close), kline.GetChange()) } } }) diff --git a/pkg/strategy/buyandhold/strategy.go b/pkg/strategy/pricedrop/strategy.go similarity index 72% rename from pkg/strategy/buyandhold/strategy.go rename to pkg/strategy/pricedrop/strategy.go index 756865530e..bfca2577fb 100644 --- a/pkg/strategy/buyandhold/strategy.go +++ b/pkg/strategy/pricedrop/strategy.go @@ -1,9 +1,8 @@ -package buyandhold +package pricedrop import ( "context" "fmt" - "math" "github.com/sirupsen/logrus" @@ -12,25 +11,31 @@ import ( "github.com/c9s/bbgo/pkg/types" ) -var log = logrus.WithField("strategy", "buyandhold") +const ID = "pricedrop" + +var log = logrus.WithField("strategy", ID) func init() { - bbgo.RegisterStrategy("buyandhold", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) } type Strategy struct { Symbol string `json:"symbol"` Interval types.Interval `json:"interval"` - BaseQuantity float64 `json:"baseQuantity"` + BaseQuantity fixedpoint.Value `json:"baseQuantity"` MinDropPercentage fixedpoint.Value `json:"minDropPercentage"` MinDropChange fixedpoint.Value `json:"minDropChange"` MovingAverageWindow int `json:"movingAverageWindow"` } +func (s *Strategy) ID() string { + return ID +} + func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { - session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: string(s.Interval)}) + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) } func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { @@ -56,7 +61,7 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se var iw = types.IntervalWindow{Interval: s.Interval, Window: s.MovingAverageWindow} var ema = standardIndicatorSet.EWMA(iw) - session.Stream.OnKLineClosed(func(kline types.KLine) { + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { // skip k-lines from other symbols if kline.Symbol != s.Symbol { return @@ -65,23 +70,23 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se 
change := kline.GetChange() // skip positive change - if change > 0 { + if change.Sign() > 0 { return } - if kline.Close > ema.Last() { - log.Warnf("kline close price %f is above EMA %s %f", kline.Close, ema.IntervalWindow, ema.Last()) + if kline.Close.Float64() > ema.Last() { + log.Warnf("kline close price %v is above EMA %s %f", kline.Close, ema.IntervalWindow, ema.Last()) return } - changeP := change / kline.Open + changeP := change.Div(kline.Open).Abs() - if s.MinDropPercentage != 0 { - if math.Abs(changeP) < math.Abs(s.MinDropPercentage.Float64()) { + if !s.MinDropPercentage.IsZero() { + if changeP.Compare(s.MinDropPercentage.Abs()) < 0 { return } - } else if s.MinDropChange != 0 { - if math.Abs(change) < math.Abs(s.MinDropChange.Float64()) { + } else if !s.MinDropChange.IsZero() { + if change.Abs().Compare(s.MinDropChange.Abs()) < 0 { return } } else { @@ -90,7 +95,7 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se return } - quantity := s.BaseQuantity * (1.0 + math.Abs(changeP)) + quantity := s.BaseQuantity.Mul(fixedpoint.One.Add(changeP)) _, err := orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ Symbol: kline.Symbol, Market: market, diff --git a/pkg/strategy/rebalance/strategy.go b/pkg/strategy/rebalance/strategy.go new file mode 100644 index 0000000000..45ce699b14 --- /dev/null +++ b/pkg/strategy/rebalance/strategy.go @@ -0,0 +1,220 @@ +package rebalance + +import ( + "context" + "fmt" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "rebalance" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + Interval types.Interval `json:"interval"` + BaseCurrency string `json:"baseCurrency"` + TargetWeights types.ValueMap `json:"targetWeights"` + Threshold fixedpoint.Value `json:"threshold"` + DryRun bool `json:"dryRun"` + // max amount to buy or sell per order + MaxAmount fixedpoint.Value `json:"maxAmount"` + + activeOrderBook *bbgo.ActiveOrderBook +} + +func (s *Strategy) Initialize() error { + return nil +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Validate() error { + if len(s.TargetWeights) == 0 { + return fmt.Errorf("targetWeights should not be empty") + } + + if !s.TargetWeights.Sum().Eq(fixedpoint.One) { + return fmt.Errorf("the sum of targetWeights should be 1") + } + + for currency, weight := range s.TargetWeights { + if weight.Float64() < 0 { + return fmt.Errorf("%s weight: %f should not less than 0", currency, weight.Float64()) + } + } + + if s.Threshold.Sign() < 0 { + return fmt.Errorf("threshold should not less than 0") + } + + if s.MaxAmount.Sign() < 0 { + return fmt.Errorf("maxAmount shoud not less than 0") + } + + return nil +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.symbols()[0], types.SubscribeOptions{Interval: s.Interval}) +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + s.activeOrderBook = bbgo.NewActiveOrderBook("") + s.activeOrderBook.BindStream(session.UserDataStream) + + markets := session.Markets() + for _, symbol := range s.symbols() { + if _, ok := markets[symbol]; !ok { + return fmt.Errorf("exchange: %s does not supoort matket: %s", session.Exchange.Name(), symbol) + } + } + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + s.rebalance(ctx, 
orderExecutor, session) + }) + + return nil +} + +func (s *Strategy) rebalance(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) { + // cancel active orders before rebalance + if err := session.Exchange.CancelOrders(ctx, s.activeOrderBook.Orders()...); err != nil { + log.WithError(err).Errorf("failed to cancel orders") + } + + submitOrders := s.generateSubmitOrders(ctx, session) + for _, order := range submitOrders { + log.Infof("generated submit order: %s", order.String()) + } + + if s.DryRun { + return + } + + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrders...) + if err != nil { + log.WithError(err).Error("failed to submit orders") + return + } + + s.activeOrderBook.Add(createdOrders...) +} + +func (s *Strategy) prices(ctx context.Context, session *bbgo.ExchangeSession) types.ValueMap { + m := make(types.ValueMap) + + tickers, err := session.Exchange.QueryTickers(ctx, s.symbols()...) + if err != nil { + log.WithError(err).Error("failed to query tickers") + return nil + } + + for currency := range s.TargetWeights { + if currency == s.BaseCurrency { + m[s.BaseCurrency] = fixedpoint.One + continue + } + m[currency] = tickers[currency+s.BaseCurrency].Last + } + + return m +} + +func (s *Strategy) quantities(session *bbgo.ExchangeSession) types.ValueMap { + m := make(types.ValueMap) + + balances := session.GetAccount().Balances() + for currency := range s.TargetWeights { + m[currency] = balances[currency].Total() + } + + return m +} + +func (s *Strategy) generateSubmitOrders(ctx context.Context, session *bbgo.ExchangeSession) (submitOrders []types.SubmitOrder) { + prices := s.prices(ctx, session) + marketValues := prices.Mul(s.quantities(session)) + currentWeights := marketValues.Normalize() + + for currency, targetWeight := range s.TargetWeights { + if currency == s.BaseCurrency { + continue + } + + symbol := currency + s.BaseCurrency + currentWeight := currentWeights[currency] + currentPrice := prices[currency] + + log.Infof("%s price: %v, current weight: %v, target weight: %v", + symbol, + currentPrice, + currentWeight, + targetWeight) + + // calculate the difference between current weight and target weight + // if the difference is less than threshold, then we will not create the order + weightDifference := targetWeight.Sub(currentWeight) + if weightDifference.Abs().Compare(s.Threshold) < 0 { + log.Infof("%s weight distance |%v - %v| = |%v| less than the threshold: %v", + symbol, + currentWeight, + targetWeight, + weightDifference, + s.Threshold) + continue + } + + quantity := weightDifference.Mul(marketValues.Sum()).Div(currentPrice) + + side := types.SideTypeBuy + if quantity.Sign() < 0 { + side = types.SideTypeSell + quantity = quantity.Abs() + } + + if s.MaxAmount.Sign() > 0 { + quantity = bbgo.AdjustQuantityByMaxAmount(quantity, currentPrice, s.MaxAmount) + log.Infof("adjust the quantity %v (%s %s @ %v) by max amount %v", + quantity, + symbol, + side.String(), + currentPrice, + s.MaxAmount) + } + + log.Debugf("symbol: %v, quantity: %v", symbol, quantity) + + order := types.SubmitOrder{ + Symbol: symbol, + Side: side, + Type: types.OrderTypeLimit, + Quantity: quantity, + Price: currentPrice, + } + + submitOrders = append(submitOrders, order) + } + + return submitOrders +} + +func (s *Strategy) symbols() (symbols []string) { + for currency := range s.TargetWeights { + if currency == s.BaseCurrency { + continue + } + symbols = append(symbols, currency+s.BaseCurrency) + } + return symbols +} diff --git 
a/pkg/strategy/rsmaker/strategy.go b/pkg/strategy/rsmaker/strategy.go new file mode 100644 index 0000000000..fa7461df46 --- /dev/null +++ b/pkg/strategy/rsmaker/strategy.go @@ -0,0 +1,883 @@ +package rsmaker + +import ( + "context" + "fmt" + "math" + "time" + + "github.com/c9s/bbgo/pkg/indicator" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/types" + "github.com/muesli/clusters" + "github.com/muesli/kmeans" +) + +// TODO: +// 1) add option for placing orders only when in neutral band +// 2) add option for only placing buy orders when price is below the SMA line + +const ID = "rsmaker" + +const stateKey = "state-v1" + +var defaultFeeRate = fixedpoint.NewFromFloat(0.001) +var notionModifier = fixedpoint.NewFromFloat(1.1) +var two = fixedpoint.NewFromInt(2) + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type State struct { + Position *types.Position `json:"position,omitempty"` + ProfitStats types.ProfitStats `json:"profitStats,omitempty"` +} + +type BollingerSetting struct { + types.IntervalWindow + BandWidth float64 `json:"bandWidth"` +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Notifiability + *bbgo.Persistence + + Environment *bbgo.Environment + StandardIndicatorSet *bbgo.StandardIndicatorSet + Market types.Market + + // Symbol is the market symbol you want to trade + Symbol string `json:"symbol"` + + // Interval is how long do you want to update your order price and quantity + Interval types.Interval `json:"interval"` + + bbgo.QuantityOrAmount + + // Spread is the price spread from the middle price. + // For ask orders, the ask price is ((bestAsk + bestBid) / 2 * (1.0 + spread)) + // For bid orders, the bid price is ((bestAsk + bestBid) / 2 * (1.0 - spread)) + // Spread can be set by percentage or floating number. e.g., 0.1% or 0.001 + Spread fixedpoint.Value `json:"spread"` + + // BidSpread overrides the spread setting, this spread will be used for the buy order + BidSpread fixedpoint.Value `json:"bidSpread,omitempty"` + + // AskSpread overrides the spread setting, this spread will be used for the sell order + AskSpread fixedpoint.Value `json:"askSpread,omitempty"` + + // MinProfitSpread is the minimal order price spread from the current average cost. + // For long position, you will only place sell order above the price (= average cost * (1 + minProfitSpread)) + // For short position, you will only place buy order below the price (= average cost * (1 - minProfitSpread)) + MinProfitSpread fixedpoint.Value `json:"minProfitSpread"` + + // UseTickerPrice use the ticker api to get the mid price instead of the closed kline price. + // The back-test engine is kline-based, so the ticker price api is not supported. + // Turn this on if you want to do real trading. + UseTickerPrice bool `json:"useTickerPrice"` + + // MaxExposurePosition is the maximum position you can hold + // +10 means you can hold 10 ETH long position by maximum + // -10 means you can hold -10 ETH short position by maximum + MaxExposurePosition fixedpoint.Value `json:"maxExposurePosition"` + + // DynamicExposurePositionScale is used to define the exposure position range with the given percentage + // when DynamicExposurePositionScale is set, + // your MaxExposurePosition will be calculated dynamically according to the bollinger band you set. 
+ DynamicExposurePositionScale *bbgo.PercentageScale `json:"dynamicExposurePositionScale"` + + // Long means your position will be long position + // Currently not used yet + Long *bool `json:"long,omitempty"` + + // Short means your position will be long position + // Currently not used yet + Short *bool `json:"short,omitempty"` + + // DisableShort means you can don't want short position during the market making + // Set to true if you want to hold more spot during market making. + DisableShort bool `json:"disableShort"` + + // BuyBelowNeutralSMA if true, the market maker will only place buy order when the current price is below the neutral band SMA. + BuyBelowNeutralSMA bool `json:"buyBelowNeutralSMA"` + + // NeutralBollinger is the smaller range of the bollinger band + // If price is in this band, it usually means the price is oscillating. + // If price goes out of this band, we tend to not place sell orders or buy orders + NeutralBollinger *BollingerSetting `json:"neutralBollinger"` + + // DefaultBollinger is the wide range of the bollinger band + // for controlling your exposure position + DefaultBollinger *BollingerSetting `json:"defaultBollinger"` + + // DowntrendSkew is the order quantity skew for normal downtrend band. + // The price is still in the default bollinger band. + // greater than 1.0 means when placing buy order, place sell order with less quantity + // less than 1.0 means when placing sell order, place buy order with less quantity + DowntrendSkew fixedpoint.Value `json:"downtrendSkew"` + + // UptrendSkew is the order quantity skew for normal uptrend band. + // The price is still in the default bollinger band. + // greater than 1.0 means when placing buy order, place sell order with less quantity + // less than 1.0 means when placing sell order, place buy order with less quantity + UptrendSkew fixedpoint.Value `json:"uptrendSkew"` + + // TradeInBand + // When this is on, places orders only when the current price is in the bollinger band. + TradeInBand bool `json:"tradeInBand"` + + // ShadowProtection is used to avoid placing bid order when price goes down strongly (without shadow) + ShadowProtection bool `json:"shadowProtection"` + ShadowProtectionRatio fixedpoint.Value `json:"shadowProtectionRatio"` + + bbgo.SmartStops + + session *bbgo.ExchangeSession + book *types.StreamOrderBook + + state *State + + activeMakerOrders *bbgo.ActiveOrderBook + orderStore *bbgo.OrderStore + tradeCollector *bbgo.TradeCollector + + groupID uint32 + + stopC chan struct{} + + // defaultBoll is the BOLLINGER indicator we used for predicting the price. 
+ defaultBoll *indicator.BOLL + + // neutralBoll is the neutral price section + neutralBoll *indicator.BOLL + + // StrategyController + status types.StrategyStatus +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Initialize() error { + return s.SmartStops.InitializeStopControllers(s.Symbol) +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + Interval: s.Interval, + }) + + //session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + // Interval: types.Interval12h.String(), + //}) + + //if s.DefaultBollinger != nil && s.DefaultBollinger.Interval != "" { + // session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + // Interval: string(s.DefaultBollinger.Interval), + // }) + //} + // + //if s.NeutralBollinger != nil && s.NeutralBollinger.Interval != "" { + // session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + // Interval: string(s.NeutralBollinger.Interval), + // }) + //} + + //s.SmartStops.Subscribe(session) +} + +func (s *Strategy) Validate() error { + if len(s.Symbol) == 0 { + return errors.New("symbol is required") + } + + return nil +} + +func (s *Strategy) CurrentPosition() *types.Position { + return s.state.Position +} + +func (s *Strategy) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error { + base := s.state.Position.GetBase() + if base.IsZero() { + return fmt.Errorf("no opened %s position", s.state.Position.Symbol) + } + + // make it negative + quantity := base.Mul(percentage).Abs() + side := types.SideTypeBuy + if base.Sign() > 0 { + side = types.SideTypeSell + } + + if quantity.Compare(s.Market.MinQuantity) < 0 { + return fmt.Errorf("order quantity %v is too small, less than %v", quantity, s.Market.MinQuantity) + } + + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: side, + Type: types.OrderTypeMarket, + Quantity: quantity, + Market: s.Market, + } + + s.Notify("Submitting %s %s order to close position by %v", s.Symbol, side.String(), percentage, submitOrder) + + createdOrders, err := s.session.Exchange.SubmitOrders(ctx, submitOrder) + if err != nil { + log.WithError(err).Errorf("can not place position close order") + } + + s.orderStore.Add(createdOrders...) + s.activeMakerOrders.Add(createdOrders...) 
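// Worked example (hypothetical function and numbers) of the ClosePosition math
// above: the close quantity is |base * percentage| and the order side is the
// opposite of the current position, submitted as a market order and still
// subject to the market's MinQuantity check.
func exampleClosePosition() {
	base := -0.8                            // currently short 0.8 of the base asset
	percentage := 0.5                       // close half of the position
	quantity := math.Abs(base * percentage) // 0.4
	side := "SELL"
	if base < 0 {
		side = "BUY" // closing a short means buying back
	}
	fmt.Printf("submit %s market order for %.2f\n", side, quantity)
}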
+ s.tradeCollector.Process() + + return err +} + +// StrategyController + +func (s *Strategy) GetStatus() types.StrategyStatus { + return s.status +} + +func (s *Strategy) Suspend(ctx context.Context) error { + s.status = types.StrategyStatusStopped + + // Cancel all order + if err := s.activeMakerOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + s.Notify("graceful cancel order error") + } else { + s.Notify("All orders cancelled.") + } + + s.tradeCollector.Process() + + // Save state + if err := s.SaveState(); err != nil { + log.WithError(err).Errorf("can not save state: %+v", s.state) + } else { + log.Infof("%s position is saved.", s.Symbol) + } + + return nil +} + +func (s *Strategy) Resume(ctx context.Context) error { + s.status = types.StrategyStatusRunning + + return nil +} + +//func (s *Strategy) EmergencyStop(ctx context.Context) error { +// // Close 100% position +// percentage, _ := fixedpoint.NewFromString("100%") +// err := s.ClosePosition(ctx, percentage) +// +// // Suspend strategy +// _ = s.Suspend(ctx) +// +// return err +//} + +func (s *Strategy) SaveState() error { + if err := s.Persistence.Save(s.state, ID, s.Symbol, stateKey); err != nil { + return err + } + + log.Infof("state is saved => %+v", s.state) + return nil +} + +func (s *Strategy) LoadState() error { + var state State + + // load position + if err := s.Persistence.Load(&state, ID, s.Symbol, stateKey); err != nil { + if err != service.ErrPersistenceNotExists { + return err + } + + s.state = &State{} + } else { + s.state = &state + log.Infof("state is restored: %+v", s.state) + } + + // if position is nil, we need to allocate a new position for calculation + if s.state.Position == nil { + s.state.Position = types.NewPositionFromMarket(s.Market) + } + + // init profit states + s.state.ProfitStats.Symbol = s.Market.Symbol + s.state.ProfitStats.BaseCurrency = s.Market.BaseCurrency + s.state.ProfitStats.QuoteCurrency = s.Market.QuoteCurrency + if s.state.ProfitStats.AccumulatedSince == 0 { + s.state.ProfitStats.AccumulatedSince = time.Now().Unix() + } + + return nil +} + +func (s *Strategy) getCurrentAllowedExposurePosition(bandPercentage float64) (fixedpoint.Value, error) { + if s.DynamicExposurePositionScale != nil { + v, err := s.DynamicExposurePositionScale.Scale(bandPercentage) + if err != nil { + return fixedpoint.Zero, err + } + return fixedpoint.NewFromFloat(v), nil + } + + return s.MaxExposurePosition, nil +} + +func (s *Strategy) placeOrders(ctx context.Context, orderExecutor bbgo.OrderExecutor, midPrice fixedpoint.Value, klines []*types.KLine) { + //bidSpread := s.Spread + //if s.BidSpread.Sign() > 0 { + // bidSpread = s.BidSpread + //} + // + //askSpread := s.Spread + //if s.AskSpread.Sign() > 0 { + // askSpread = s.AskSpread + //} + // preprocessing + max := 0. + min := 100000. + + mv := 0. 
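// Worked example (hypothetical function and numbers) of the preprocessing loop
// that follows: only klines whose volume exceeds 30% of the average volume of
// the last 50 klines are kept as (high, low) observations for clustering.
func exampleVolumeFilter() {
	volumes := []float64{12, 3, 40, 1, 24} // pretend kline volumes
	avg := 0.0
	for _, v := range volumes {
		avg += v
	}
	avg /= float64(len(volumes)) // 16
	for i, v := range volumes {
		if v > avg*0.3 { // threshold 4.8: klines 0, 2 and 4 are kept
			fmt.Printf("kline %d kept (volume %.0f)\n", i, v)
		}
	}
}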
+ for x := 0; x < 50; x++ { + if klines[x].High.Float64() > max { + max = klines[x].High.Float64() + } + if klines[x].Low.Float64() < min { + min = klines[x].High.Float64() + } + + mv += klines[x].Volume.Float64() + } + mv = mv / 50 + + //logrus.Info(max, min) + // set up a random two-dimensional data set (float64 values between 0.0 and 1.0) + var d clusters.Observations + for x := 0; x < 50; x++ { + //if klines[x].High.Float64() < max || klines[x].Low.Float64() > min { + if klines[x].Volume.Float64() > mv*0.3 { + d = append(d, clusters.Coordinates{ + klines[x].High.Float64(), + klines[x].Low.Float64(), + //klines[x].Open.Float64(), + //klines[x].Close.Float64(), + //klines[x].Volume.Float64(), + }) + } + //} + + } + log.Info(len(d)) + + // Partition the data points into 2 clusters + km := kmeans.New() + clusters, err := km.Partition(d, 3) + + //for _, c := range clusters { + //fmt.Printf("Centered at x: %.2f y: %.2f\n", c.Center[0], c.Center[1]) + //fmt.Printf("Matching data points: %+v\n\n", c.Observations) + //} + // clustered virtual kline_1's mid price + //vk1mp := fixedpoint.NewFromFloat((clusters[0].Center[0] + clusters[0].Center[1]) / 2.) + // clustered virtual kline_2's mid price + //vk2mp := fixedpoint.NewFromFloat((clusters[1].Center[0] + clusters[1].Center[1]) / 2.) + // clustered virtual kline_3's mid price + //vk3mp := fixedpoint.NewFromFloat((clusters[2].Center[0] + clusters[2].Center[1]) / 2.) + + // clustered virtual kline_1's high price + vk1hp := fixedpoint.NewFromFloat(clusters[0].Center[0]) + // clustered virtual kline_2's high price + vk2hp := fixedpoint.NewFromFloat(clusters[1].Center[0]) + // clustered virtual kline_3's high price + vk3hp := fixedpoint.NewFromFloat(clusters[2].Center[0]) + + // clustered virtual kline_1's low price + vk1lp := fixedpoint.NewFromFloat(clusters[0].Center[1]) + // clustered virtual kline_2's low price + vk2lp := fixedpoint.NewFromFloat(clusters[1].Center[1]) + // clustered virtual kline_3's low price + vk3lp := fixedpoint.NewFromFloat(clusters[2].Center[1]) + + askPrice := fixedpoint.NewFromFloat(math.Max(math.Max(vk1hp.Float64(), vk2hp.Float64()), vk3hp.Float64())) //fixedpoint.NewFromFloat(math.Max(math.Max(vk1mp.Float64(), vk2mp.Float64()), vk3mp.Float64())) + bidPrice := fixedpoint.NewFromFloat(math.Min(math.Min(vk1lp.Float64(), vk2lp.Float64()), vk3lp.Float64())) //fixedpoint.NewFromFloat(math.Min(math.Min(vk1mp.Float64(), vk2mp.Float64()), vk3mp.Float64())) + + //if vk1mp.Compare(vk2mp) > 0 { + // askPrice = vk1mp //.Mul(fixedpoint.NewFromFloat(1.001)) + // bidPrice = vk2mp //.Mul(fixedpoint.NewFromFloat(0.999)) + //} else if vk1mp.Compare(vk2mp) < 0 { + // askPrice = vk2mp //.Mul(fixedpoint.NewFromFloat(1.001)) + // bidPrice = vk1mp //.Mul(fixedpoint.NewFromFloat(0.999)) + //} + //midPrice.Mul(fixedpoint.One.Add(askSpread)) + //midPrice.Mul(fixedpoint.One.Sub(bidSpread)) + base := s.state.Position.GetBase() + //balances := s.session.GetAccount().Balances() + + //log.Infof("mid price:%v spread: %s ask:%v bid: %v position: %s", + // midPrice, + // s.Spread.Percentage(), + // askPrice, + // bidPrice, + // s.state.Position, + //) + canSell := true + canBuy := true + + //predMidPrice := (askPrice + bidPrice) / 2. 
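// Sketch (hypothetical function, same muesli/kmeans and math imports as this
// file) of the clustering step above: each kept kline becomes a (high, low)
// observation, the observations are partitioned into three clusters, and the
// quotes are taken from the extreme cluster centers: the highest clustered
// high becomes the ask, the lowest clustered low becomes the bid.
func exampleClusterQuotes(highs, lows []float64) (ask, bid float64, err error) {
	var d clusters.Observations
	for i := range highs {
		d = append(d, clusters.Coordinates{highs[i], lows[i]})
	}

	km := kmeans.New()
	cs, err := km.Partition(d, 3)
	if err != nil {
		return 0, 0, err
	}

	ask, bid = cs[0].Center[0], cs[0].Center[1]
	for _, c := range cs {
		ask = math.Max(ask, c.Center[0]) // highest clustered high
		bid = math.Min(bid, c.Center[1]) // lowest clustered low
	}
	return ask, bid, nil
}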
+ + //if midPrice.Float64() > predMidPrice.Float64() { + // bidPrice = predMidPrice.Mul(fixedpoint.NewFromFloat(0.999)) + //} + // + //if midPrice.Float64() < predMidPrice.Float64() { + // askPrice = predMidPrice.Mul(fixedpoint.NewFromFloat(1.001)) + //} + // + //if midPrice.Float64() > askPrice.Float64() { + // canBuy = false + // askPrice = midPrice.Mul(fixedpoint.NewFromFloat(1.001)) + //} + // + //if midPrice.Float64() < bidPrice.Float64() { + // canSell = false + // bidPrice = midPrice.Mul(fixedpoint.NewFromFloat(0.999)) + //} + + sellQuantity := s.QuantityOrAmount.CalculateQuantity(askPrice) + buyQuantity := s.QuantityOrAmount.CalculateQuantity(bidPrice) + + sellOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeLimitMaker, + Quantity: sellQuantity, + Price: askPrice, + Market: s.Market, + GroupID: s.groupID, + } + buyOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeLimitMaker, + Quantity: buyQuantity, + Price: bidPrice, + Market: s.Market, + GroupID: s.groupID, + } + + var submitBuyOrders []types.SubmitOrder + var submitSellOrders []types.SubmitOrder + + //baseBalance, hasBaseBalance := balances[s.Market.BaseCurrency] + //quoteBalance, hasQuoteBalance := balances[s.Market.QuoteCurrency] + + downBand := s.defaultBoll.LastDownBand() + upBand := s.defaultBoll.LastUpBand() + sma := s.defaultBoll.LastSMA() + log.Infof("bollinger band: up %f sma %f down %f", upBand, sma, downBand) + + bandPercentage := calculateBandPercentage(upBand, downBand, sma, midPrice.Float64()) + log.Infof("mid price band percentage: %v", bandPercentage) + + maxExposurePosition, err := s.getCurrentAllowedExposurePosition(bandPercentage) + if err != nil { + log.WithError(err).Errorf("can not calculate CurrentAllowedExposurePosition") + return + } + + log.Infof("calculated max exposure position: %v", maxExposurePosition) + + if maxExposurePosition.Sign() > 0 && base.Compare(maxExposurePosition) > 0 { + canBuy = false + } + + if maxExposurePosition.Sign() > 0 { + if s.Long != nil && *s.Long && base.Sign() < 0 { + canSell = false + } else if base.Compare(maxExposurePosition.Neg()) < 0 { + canSell = false + } + } + + //if s.ShadowProtection && kline != nil { + // switch kline.Direction() { + // case types.DirectionDown: + // shadowHeight := kline.GetLowerShadowHeight() + // shadowRatio := kline.GetLowerShadowRatio() + // if shadowHeight.IsZero() && shadowRatio.Compare(s.ShadowProtectionRatio) < 0 { + // log.Infof("%s shadow protection enabled, lower shadow ratio %v < %v", s.Symbol, shadowRatio, s.ShadowProtectionRatio) + // canBuy = false + // } + // case types.DirectionUp: + // shadowHeight := kline.GetUpperShadowHeight() + // shadowRatio := kline.GetUpperShadowRatio() + // if shadowHeight.IsZero() || shadowRatio.Compare(s.ShadowProtectionRatio) < 0 { + // log.Infof("%s shadow protection enabled, upper shadow ratio %v < %v", s.Symbol, shadowRatio, s.ShadowProtectionRatio) + // canSell = false + // } + // } + //} + + // Apply quantity skew + // CASE #1: + // WHEN: price is in the neutral bollginer band (window 1) == neutral + // THEN: we don't apply skew + // CASE #2: + // WHEN: price is in the upper band (window 2 > price > window 1) == upTrend + // THEN: we apply upTrend skew + // CASE #3: + // WHEN: price is in the lower band (window 2 < price < window 1) == downTrend + // THEN: we apply downTrend skew + // CASE #4: + // WHEN: price breaks the lower band (price < window 2) == strongDownTrend + // THEN: we apply 
strongDownTrend skew + // CASE #5: + // WHEN: price breaks the upper band (price > window 2) == strongUpTrend + // THEN: we apply strongUpTrend skew + //if s.TradeInBand { + // if !inBetween(midPrice.Float64(), s.neutralBoll.LastDownBand(), s.neutralBoll.LastUpBand()) { + // log.Infof("tradeInBand is set, skip placing orders when the price is outside of the band") + // return + // } + //} + + //revmacd := s.detectPriceTrend(s.neutralBoll, midPrice.Float64()) + //switch revmacd { + //case NeutralTrend: + // // do nothing + // + //case UpTrend: + // skew := s.UptrendSkew + // buyOrder.Quantity = fixedpoint.Max(s.Market.MinQuantity, sellOrder.Quantity.Mul(skew)) + // + //case DownTrend: + // skew := s.DowntrendSkew + // ratio := fixedpoint.One.Div(skew) + // sellOrder.Quantity = fixedpoint.Max(s.Market.MinQuantity, buyOrder.Quantity.Mul(ratio)) + // + //} + + //if !hasQuoteBalance || buyOrder.Quantity.Mul(buyOrder.Price).Compare(quoteBalance.Available) > 0 { + // canBuy = false + //} + // + //if !hasBaseBalance || sellOrder.Quantity.Compare(baseBalance.Available) > 0 { + // canSell = false + //} + + //if midPrice.Compare(s.state.Position.AverageCost.Mul(fixedpoint.One.Add(s.MinProfitSpread))) < 0 { + // canSell = false + //} + + //if s.Long != nil && *s.Long && base.Sub(sellOrder.Quantity).Sign() < 0 { + // canSell = false + //} + // + //if s.BuyBelowNeutralSMA && midPrice.Float64() > s.neutralBoll.LastSMA() { + // canBuy = false + //} + + if canSell { + submitSellOrders = append(submitSellOrders, sellOrder) + //sellOrder = s.adjustOrderPrice(sellOrder, false) + //submitSellOrders = append(submitSellOrders, sellOrder) + //sellOrder = s.adjustOrderPrice(sellOrder, false) + //submitSellOrders = append(submitSellOrders, sellOrder) + } + if canBuy { + submitBuyOrders = append(submitBuyOrders, buyOrder) + //buyOrder = s.adjustOrderPrice(buyOrder, true) + //submitBuyOrders = append(submitBuyOrders, buyOrder) + //buyOrder = s.adjustOrderPrice(buyOrder, true) + //submitBuyOrders = append(submitBuyOrders, buyOrder) + } + + // condition for lower the average cost + /* + if midPrice < s.state.Position.AverageCost.MulFloat64(1.0-s.MinProfitSpread.Float64()) && canBuy { + submitOrders = append(submitOrders, buyOrder) + } + */ + + for i := range submitBuyOrders { + submitBuyOrders[i] = s.adjustOrderQuantity(submitBuyOrders[i]) + } + + for i := range submitSellOrders { + submitSellOrders[i] = s.adjustOrderQuantity(submitSellOrders[i]) + } + + createdBuyOrders, err := orderExecutor.SubmitOrders(ctx, submitBuyOrders...) + if err != nil { + log.WithError(err).Errorf("can not place ping pong orders") + } + s.orderStore.Add(createdBuyOrders...) + s.activeMakerOrders.Add(createdBuyOrders...) + + createdSellOrders, err := orderExecutor.SubmitOrders(ctx, submitSellOrders...) + if err != nil { + log.WithError(err).Errorf("can not place ping pong orders") + } + s.orderStore.Add(createdSellOrders...) + s.activeMakerOrders.Add(createdSellOrders...) 
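For reference, the exposure cap applied above is driven by where the mid price sits inside the Bollinger band. A tiny worked example of that arithmetic (the same formula as calculateBandPercentage, defined near the end of this file) with made-up band values:

    package main

    import (
        "fmt"
        "math"
    )

    // same arithmetic as calculateBandPercentage in this file: negative values lean toward
    // the lower band, positive values toward the upper band, |value| > 1 means a breakout
    func bandPercentage(up, down, sma, mid float64) float64 {
        if mid < sma {
            return (mid - sma) / math.Abs(sma-down)
        } else if mid > sma {
            return (mid - sma) / math.Abs(up-sma)
        }
        return 0.0
    }

    func main() {
        up, sma, down := 110.0, 100.0, 90.0 // made-up band values
        for _, mid := range []float64{95, 100, 108, 115} {
            fmt.Printf("mid=%.1f band%%=%.2f\n", mid, bandPercentage(up, down, sma, mid))
        }
        // prints -0.50, 0.00, 0.80, 1.50
    }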
+} + +type PriceTrend string + +const ( + NeutralTrend PriceTrend = "neutral" + UpTrend PriceTrend = "upTrend" + DownTrend PriceTrend = "downTrend" + UnknownTrend PriceTrend = "unknown" +) + +func (s *Strategy) detectPriceTrend(inc *indicator.BOLL, price float64) PriceTrend { + if inBetween(price, inc.LastDownBand(), inc.LastUpBand()) { + return NeutralTrend + } + + if price < inc.LastDownBand() { + return DownTrend + } + + if price > inc.LastUpBand() { + return UpTrend + } + + return UnknownTrend +} + +func (s *Strategy) adjustOrderQuantity(submitOrder types.SubmitOrder) types.SubmitOrder { + if submitOrder.Quantity.Mul(submitOrder.Price).Compare(s.Market.MinNotional) < 0 { + submitOrder.Quantity = bbgo.AdjustFloatQuantityByMinAmount(submitOrder.Quantity, submitOrder.Price, s.Market.MinNotional.Mul(notionModifier)) + } + + if submitOrder.Quantity.Compare(s.Market.MinQuantity) < 0 { + submitOrder.Quantity = fixedpoint.Max(submitOrder.Quantity, s.Market.MinQuantity) + } + + return submitOrder +} + +func (s *Strategy) adjustOrderPrice(submitOrder types.SubmitOrder, side bool) types.SubmitOrder { + + if side { + submitOrder.Price = submitOrder.Price.Mul(fixedpoint.NewFromFloat(0.995)) + } else { + submitOrder.Price = submitOrder.Price.Mul(fixedpoint.NewFromFloat(1.005)) + } + + return submitOrder +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + // StrategyController + s.status = types.StrategyStatusRunning + + //if s.DisableShort { + // s.Long = &[]bool{true}[0] + //} + // + //if s.MinProfitSpread.IsZero() { + // s.MinProfitSpread = fixedpoint.NewFromFloat(0.001) + //} + // + //if s.UptrendSkew.IsZero() { + // s.UptrendSkew = fixedpoint.NewFromFloat(1.0 / 1.2) + //} + // + //if s.DowntrendSkew.IsZero() { + // s.DowntrendSkew = fixedpoint.NewFromFloat(1.2) + //} + // + //if s.ShadowProtectionRatio.IsZero() { + // s.ShadowProtectionRatio = fixedpoint.NewFromFloat(0.01) + //} + + // initial required information + s.session = session + s.neutralBoll = s.StandardIndicatorSet.BOLL(s.NeutralBollinger.IntervalWindow, s.NeutralBollinger.BandWidth) + s.defaultBoll = s.StandardIndicatorSet.BOLL(s.DefaultBollinger.IntervalWindow, s.DefaultBollinger.BandWidth) + + // calculate group id for orders + instanceID := fmt.Sprintf("%s-%s", ID, s.Symbol) + //s.groupID = max.GenerateGroupID(instanceID) + log.Infof("using group id %d from fnv(%s)", s.groupID, instanceID) + + // restore state + if err := s.LoadState(); err != nil { + return err + } + + s.state.Position.Strategy = ID + s.state.Position.StrategyInstanceID = instanceID + + //s.stopC = make(chan struct{}) + + s.activeMakerOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeMakerOrders.BindStream(session.UserDataStream) + + s.orderStore = bbgo.NewOrderStore(s.Symbol) + s.orderStore.BindStream(session.UserDataStream) + + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.state.Position, s.orderStore) + + //s.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + // // StrategyController + // if s.status != types.StrategyStatusRunning { + // return + // } + // + // s.Notifiability.Notify(trade) + // s.state.ProfitStats.AddTrade(trade) + // + // if profit.Compare(fixedpoint.Zero) == 0 { + // s.Environment.RecordPosition(s.state.Position, trade, nil) + // } else { + // log.Infof("%s generated profit: %v", s.Symbol, profit) + // p := s.state.Position.NewProfit(trade, profit, netProfit) + // p.Strategy = ID + // p.StrategyInstanceID = instanceID + // 
s.Notify(&p) + // + // s.state.ProfitStats.AddProfit(p) + // s.Notify(&s.state.ProfitStats) + // + // s.Environment.RecordPosition(s.state.Position, trade, &p) + // } + //}) + // + //s.tradeCollector.OnPositionUpdate(func(position *types.Position) { + // log.Infof("position changed: %s", s.state.Position) + // s.Notify(s.state.Position) + //}) + + s.tradeCollector.BindStream(session.UserDataStream) + + //s.SmartStops.RunStopControllers(ctx, session, s.tradeCollector) + + //session.UserDataStream.OnStart(func() { + //if s.UseTickerPrice { + // ticker, err := s.session.Exchange.QueryTicker(ctx, s.Symbol) + // if err != nil { + // return + // } + // + // midPrice := ticker.Buy.Add(ticker.Sell).Div(two) + // s.placeOrders(ctx, orderExecutor, midPrice, nil) + //} else { + // if price, ok := session.LastPrice(s.Symbol); ok { + // s.placeOrders(ctx, orderExecutor, price, nil) + // } + //} + //}) + + var klines []*types.KLine + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // StrategyController + if s.status != types.StrategyStatusRunning { + return + } + + //if kline.Symbol != s.Symbol || kline.Interval != s.Interval { + // return + //} + + if kline.Interval == s.Interval { + klines = append(klines, &kline) + } + if len(klines) > 50 { + //if s.UseTickerPrice { + // ticker, err := s.session.Exchange.QueryTicker(ctx, s.Symbol) + // if err != nil { + // return + // } + // + // midPrice := ticker.Buy.Add(ticker.Sell).Div(two) + // log.Infof("using ticker price: bid %v / ask %v, mid price %v", ticker.Buy, ticker.Sell, midPrice) + // s.placeOrders(ctx, orderExecutor, midPrice, klines[len(klines)-100:]) + // s.tradeCollector.Process() + //} + //else { + if kline.Interval == s.Interval { + + //if s.state.Position.AverageCost.Div(kline.Close).Float64() < 0.999 { + // s.ClosePosition(ctx, fixedpoint.One) + // s.tradeCollector.Process() + //} + + if err := s.activeMakerOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + // check if there is a canceled order had partially filled. 
+ s.tradeCollector.Process() + + s.placeOrders(ctx, orderExecutor, kline.Close, klines[len(klines)-50:]) + s.tradeCollector.Process() + } + //} + } + + }) + + // s.book = types.NewStreamBook(s.Symbol) + // s.book.BindStreamForBackground(session.MarketDataStream) + + //s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + // //defer wg.Done() + // //close(s.stopC) + // + // if err := s.activeMakerOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + // log.WithError(err).Errorf("graceful cancel order error") + // } + // + // s.tradeCollector.Process() + // + // if err := s.SaveState(); err != nil { + // log.WithError(err).Errorf("can not save state: %+v", s.state) + // } + //}) + + return nil +} + +func calculateBandPercentage(up, down, sma, midPrice float64) float64 { + if midPrice < sma { + // should be negative percentage + return (midPrice - sma) / math.Abs(sma-down) + } else if midPrice > sma { + // should be positive percentage + return (midPrice - sma) / math.Abs(up-sma) + } + + return 0.0 +} + +func inBetween(x, a, b float64) bool { + return a < x && x < b +} diff --git a/pkg/strategy/schedule/strategy.go b/pkg/strategy/schedule/strategy.go new file mode 100644 index 0000000000..82ad3ecf5b --- /dev/null +++ b/pkg/strategy/schedule/strategy.go @@ -0,0 +1,183 @@ +package schedule + +import ( + "context" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "schedule" + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + Market types.Market + + // StandardIndicatorSet contains the standard indicators of a market (symbol) + // This field will be injected automatically since we defined the Symbol field. 
+ *bbgo.StandardIndicatorSet + + // Interval is the period that you want to submit order + Interval types.Interval `json:"interval"` + + // Symbol is the symbol of the market + Symbol string `json:"symbol"` + + // Side is the order side type, which can be buy or sell + Side types.SideType `json:"side,omitempty"` + + bbgo.QuantityOrAmount + + BelowMovingAverage *bbgo.MovingAverageSettings `json:"belowMovingAverage,omitempty"` + + AboveMovingAverage *bbgo.MovingAverageSettings `json:"aboveMovingAverage,omitempty"` +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) + if s.BelowMovingAverage != nil { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.BelowMovingAverage.Interval}) + } + if s.AboveMovingAverage != nil { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.AboveMovingAverage.Interval}) + } +} + +func (s *Strategy) Validate() error { + if err := s.QuantityOrAmount.Validate(); err != nil { + return err + } + + return nil +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + if s.StandardIndicatorSet == nil { + return errors.New("StandardIndicatorSet can not be nil, injection failed?") + } + + var belowMA types.Float64Indicator + var aboveMA types.Float64Indicator + var err error + if s.BelowMovingAverage != nil { + belowMA, err = s.BelowMovingAverage.Indicator(s.StandardIndicatorSet) + if err != nil { + return err + } + } + + if s.AboveMovingAverage != nil { + aboveMA, err = s.AboveMovingAverage.Indicator(s.StandardIndicatorSet) + if err != nil { + return err + } + } + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + if kline.Symbol != s.Symbol { + return + } + + if kline.Interval != s.Interval { + return + } + + closePrice := kline.Close + closePriceF := closePrice.Float64() + quantity := s.QuantityOrAmount.CalculateQuantity(closePrice) + side := s.Side + + if s.BelowMovingAverage != nil || s.AboveMovingAverage != nil { + + match := false + // if any of the conditions satisfies then we execute order + if belowMA != nil && closePriceF < belowMA.Last() { + match = true + if s.BelowMovingAverage != nil { + if s.BelowMovingAverage.Side != nil { + side = *s.BelowMovingAverage.Side + } + + // override the default quantity or amount + if s.BelowMovingAverage.QuantityOrAmount.IsSet() { + quantity = s.BelowMovingAverage.QuantityOrAmount.CalculateQuantity(closePrice) + } + } + } else if aboveMA != nil && closePriceF > aboveMA.Last() { + match = true + if s.AboveMovingAverage != nil { + if s.AboveMovingAverage.Side != nil { + side = *s.AboveMovingAverage.Side + } + + if s.AboveMovingAverage.QuantityOrAmount.IsSet() { + quantity = s.AboveMovingAverage.QuantityOrAmount.CalculateQuantity(closePrice) + } + } + } + + if !match { + bbgo.Notify("skip, the %s closed price %v is below or above moving average", s.Symbol, closePrice) + return + } + } + + // calculate quote quantity for balance checking + quoteQuantity := quantity.Mul(closePrice) + + // execute orders + switch side { + case types.SideTypeBuy: + quoteBalance, ok := session.GetAccount().Balance(s.Market.QuoteCurrency) + if !ok { + log.Errorf("can not place scheduled %s order, quote balance %s is empty", s.Symbol, s.Market.QuoteCurrency) + return + } + + if quoteBalance.Available.Compare(quoteQuantity) < 0 { + 
bbgo.Notify("Can not place scheduled %s order: quote balance %s is not enough: %v < %v", s.Symbol, s.Market.QuoteCurrency, quoteBalance.Available, quoteQuantity) + log.Errorf("can not place scheduled %s order: quote balance %s is not enough: %v < %v", s.Symbol, s.Market.QuoteCurrency, quoteBalance.Available, quoteQuantity) + return + } + + case types.SideTypeSell: + baseBalance, ok := session.GetAccount().Balance(s.Market.BaseCurrency) + if !ok { + log.Errorf("can not place scheduled %s order, base balance %s is empty", s.Symbol, s.Market.BaseCurrency) + return + } + + if baseBalance.Available.Compare(quantity) < 0 { + bbgo.Notify("Can not place scheduled %s order: base balance %s is not enough: %v < %v", s.Symbol, s.Market.QuoteCurrency, baseBalance.Available, quantity) + log.Errorf("can not place scheduled %s order: base balance %s is not enough: %v < %v", s.Symbol, s.Market.QuoteCurrency, baseBalance.Available, quantity) + return + } + + } + + bbgo.Notify("Submitting scheduled %s order with quantity %v at price %v", s.Symbol, quantity, closePrice) + _, err := orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ + Symbol: s.Symbol, + Side: side, + Type: types.OrderTypeMarket, + Quantity: quantity, + Market: s.Market, + }) + if err != nil { + bbgo.Notify("Can not place scheduled %s order: submit error %s", s.Symbol, err.Error()) + log.WithError(err).Errorf("can not place scheduled %s order error", s.Symbol) + } + }) + + return nil +} diff --git a/pkg/strategy/skeleton/strategy.go b/pkg/strategy/skeleton/strategy.go index 07b3636ded..21079e7421 100644 --- a/pkg/strategy/skeleton/strategy.go +++ b/pkg/strategy/skeleton/strategy.go @@ -3,45 +3,77 @@ package skeleton import ( "context" - log "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus" "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) +const ID = "skeleton" + +var log = logrus.WithField("strategy", ID) + func init() { - bbgo.RegisterStrategy("skeleton", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) } type Strategy struct { Symbol string `json:"symbol"` +} - types.Market +func (s *Strategy) ID() string { + return ID } func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + log.Infof("subscribe %s", s.Symbol) session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"}) } +var Ten = fixedpoint.NewFromInt(10) + +// This strategy simply spent all available quote currency to buy the symbol whenever kline gets closed func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { - session.Stream.OnKLineClosed(func(kline types.KLine) { - quoteBalance, ok := session.Account.Balance(s.Market.QuoteCurrency) + market, ok := session.Market(s.Symbol) + if !ok { + log.Warnf("fetch market fail %s", s.Symbol) + return nil + } + callback := func(kline types.KLine) { + quoteBalance, ok := session.GetAccount().Balance(market.QuoteCurrency) + if !ok { + return + } + quantityAmount := quoteBalance.Available + if quantityAmount.Sign() <= 0 || quantityAmount.Compare(Ten) < 0 { + return + } + + currentPrice, ok := session.LastPrice(s.Symbol) if !ok { return } - _ = quoteBalance + + totalQuantity := quantityAmount.Div(currentPrice) _, err := orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ Symbol: kline.Symbol, Side: types.SideTypeBuy, Type: types.OrderTypeMarket, - Quantity: 0.01, + Price: currentPrice, + Quantity: totalQuantity, }) if err != nil { log.WithError(err).Error("submit order error") } + } + 
session.UserDataStream.OnStart(func() { + log.Infof("connected") }) + session.MarketDataStream.OnKLineClosed(callback) + return nil } diff --git a/pkg/strategy/supertrend/strategy.go b/pkg/strategy/supertrend/strategy.go new file mode 100644 index 0000000000..5d70edfa9a --- /dev/null +++ b/pkg/strategy/supertrend/strategy.go @@ -0,0 +1,403 @@ +package supertrend + +import ( + "context" + "fmt" + "github.com/c9s/bbgo/pkg/util" + "sync" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "supertrend" + +const stateKey = "state-v1" + +var log = logrus.WithField("strategy", ID) + +func init() { + // Register the pointer of the strategy struct, + // so that bbgo knows what struct to be used to unmarshal the configs (YAML or JSON) + // Note: built-in strategies need to imported manually in the bbgo cmd package. + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + + Environment *bbgo.Environment + session *bbgo.ExchangeSession + Market types.Market + + // persistence fields + Position *types.Position `json:"position,omitempty" persistence:"position"` + ProfitStats *types.ProfitStats `json:"profitStats,omitempty" persistence:"profit_stats"` + + // Order and trade + orderExecutor *bbgo.GeneralOrderExecutor + + // groupID is the group ID used for the strategy instance for canceling orders + groupID uint32 + + stopC chan struct{} + + // Symbol is the market symbol you want to trade + Symbol string `json:"symbol"` + + // Interval is how long do you want to update your order price and quantity + Interval types.Interval `json:"interval"` + + // FastDEMAWindow DEMA window for checking breakout + FastDEMAWindow int `json:"fastDEMAWindow"` + // SlowDEMAWindow DEMA window for checking breakout + SlowDEMAWindow int `json:"slowDEMAWindow"` + fastDEMA *indicator.DEMA + slowDEMA *indicator.DEMA + + // SuperTrend indicator + // SuperTrend SuperTrend `json:"superTrend"` + Supertrend *indicator.Supertrend + // SupertrendWindow ATR window for calculation of supertrend + SupertrendWindow int `json:"supertrendWindow"` + // SupertrendMultiplier ATR multiplier for calculation of supertrend + SupertrendMultiplier float64 `json:"supertrendMultiplier"` + + // Leverage + Leverage float64 `json:"leverage"` + + // TakeProfitMultiplier TP according to ATR multiple, 0 to disable this + TakeProfitMultiplier float64 `json:"takeProfitMultiplier"` + + // StopLossByTriggeringK Set SL price to the low of the triggering Kline + StopLossByTriggeringK bool `json:"stopLossByTriggeringK"` + + // TPSLBySignal TP/SL by reversed signals + TPSLBySignal bool `json:"tpslBySignal"` + + currentTakeProfitPrice fixedpoint.Value + currentStopLossPrice fixedpoint.Value + + // StrategyController + bbgo.StrategyController +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) Validate() error { + if len(s.Symbol) == 0 { + return errors.New("symbol is required") + } + + if len(s.Interval) == 0 { + return errors.New("interval is required") + } + + if s.Leverage == 0.0 { + return errors.New("leverage is required") + } + + return nil +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) +} + +// Position control + 
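Before the position-control helpers that follow, it may help to see how the fast/slow DEMA pair declared in the supertrend struct above is meant to be used. The sketch below feeds two DEMA indicators with invented prices via the same Update/Last calls that appear later in this file (setupIndicators applies the 144/169 defaults); the interval, windows and prices are assumptions for illustration:

    package main

    import (
        "fmt"

        "github.com/c9s/bbgo/pkg/indicator"
        "github.com/c9s/bbgo/pkg/types"
    )

    func main() {
        fast := &indicator.DEMA{IntervalWindow: types.IntervalWindow{Interval: types.Interval1m, Window: 144}}
        slow := &indicator.DEMA{IntervalWindow: types.IntervalWindow{Interval: types.Interval1m, Window: 169}}

        // feed invented close prices, the same way updateIndicators feeds kline closes
        for i := 0; i < 200; i++ {
            price := 100.0 + float64(i)*0.1
            fast.Update(price)
            slow.Update(price)
        }

        // a close above both DEMA lines leans bullish, below both leans bearish
        fmt.Printf("fast DEMA %.2f, slow DEMA %.2f\n", fast.Last(), slow.Last())
    }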
+func (s *Strategy) CurrentPosition() *types.Position { + return s.Position +} + +func (s *Strategy) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error { + base := s.Position.GetBase() + if base.IsZero() { + return fmt.Errorf("no opened %s position", s.Position.Symbol) + } + + // make it negative + quantity := base.Mul(percentage).Abs() + side := types.SideTypeBuy + if base.Sign() > 0 { + side = types.SideTypeSell + } + + if quantity.Compare(s.Market.MinQuantity) < 0 { + return fmt.Errorf("%s order quantity %v is too small, less than %v", s.Symbol, quantity, s.Market.MinQuantity) + } + + orderForm := s.generateOrderForm(side, quantity, types.SideEffectTypeAutoRepay) + + log.Infof("submit close position order %v", orderForm) + bbgo.Notify("Submitting %s %s order to close position by %v", s.Symbol, side.String(), percentage) + + _, err := s.orderExecutor.SubmitOrders(ctx, orderForm) + if err != nil { + log.WithError(err).Errorf("can not place %s position close order", s.Symbol) + bbgo.Notify("can not place %s position close order", s.Symbol) + } + + return err +} + +// setupIndicators initializes indicators +func (s *Strategy) setupIndicators() { + if s.FastDEMAWindow == 0 { + s.FastDEMAWindow = 144 + } + s.fastDEMA = &indicator.DEMA{IntervalWindow: types.IntervalWindow{Interval: s.Interval, Window: s.FastDEMAWindow}} + + if s.SlowDEMAWindow == 0 { + s.SlowDEMAWindow = 169 + } + s.slowDEMA = &indicator.DEMA{IntervalWindow: types.IntervalWindow{Interval: s.Interval, Window: s.SlowDEMAWindow}} + + if s.SupertrendWindow == 0 { + s.SupertrendWindow = 39 + } + if s.SupertrendMultiplier == 0 { + s.SupertrendMultiplier = 3 + } + s.Supertrend = &indicator.Supertrend{IntervalWindow: types.IntervalWindow{Window: s.SupertrendWindow, Interval: s.Interval}, ATRMultiplier: s.SupertrendMultiplier} + s.Supertrend.AverageTrueRange = &indicator.ATR{IntervalWindow: types.IntervalWindow{Window: s.SupertrendWindow, Interval: s.Interval}} + +} + +// updateIndicators updates indicators +func (s *Strategy) updateIndicators(kline types.KLine) { + closePrice := kline.GetClose().Float64() + + // Update indicators + if kline.Interval == s.fastDEMA.Interval { + s.fastDEMA.Update(closePrice) + } + if kline.Interval == s.slowDEMA.Interval { + s.slowDEMA.Update(closePrice) + } + if kline.Interval == s.Supertrend.Interval { + s.Supertrend.Update(kline.GetHigh().Float64(), kline.GetLow().Float64(), closePrice) + } +} + +func (s *Strategy) generateOrderForm(side types.SideType, quantity fixedpoint.Value, marginOrderSideEffect types.MarginOrderSideEffectType) types.SubmitOrder { + orderForm := types.SubmitOrder{ + Symbol: s.Symbol, + Market: s.Market, + Side: side, + Type: types.OrderTypeMarket, + Quantity: quantity, + MarginSideEffect: marginOrderSideEffect, + GroupID: s.groupID, + } + + return orderForm +} + +// calculateQuantity returns leveraged quantity +func (s *Strategy) calculateQuantity(currentPrice fixedpoint.Value) fixedpoint.Value { + balance, ok := s.session.GetAccount().Balance(s.Market.QuoteCurrency) + if !ok { + log.Errorf("can not update %s balance from exchange", s.Symbol) + return fixedpoint.Zero + } + + amountAvailable := balance.Available.Mul(fixedpoint.NewFromFloat(s.Leverage)) + quantity := amountAvailable.Div(currentPrice) + + return quantity +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + s.session = session + + // calculate group id for orders + instanceID := s.InstanceID() + s.groupID = 
util.FNV32(instanceID) + + // If position is nil, we need to allocate a new position for calculation + if s.Position == nil { + s.Position = types.NewPositionFromMarket(s.Market) + } + // Always update the position fields + s.Position.Strategy = ID + s.Position.StrategyInstanceID = s.InstanceID() + + // Set fee rate + if s.session.MakerFeeRate.Sign() > 0 || s.session.TakerFeeRate.Sign() > 0 { + s.Position.SetExchangeFeeRate(s.session.ExchangeName, types.ExchangeFee{ + MakerFeeRate: s.session.MakerFeeRate, + TakerFeeRate: s.session.TakerFeeRate, + }) + } + + // Profit + if s.ProfitStats == nil { + s.ProfitStats = types.NewProfitStats(s.Market) + } + + // Setup order executor + s.orderExecutor = bbgo.NewGeneralOrderExecutor(session, s.Symbol, ID, instanceID, s.Position) + s.orderExecutor.BindEnvironment(s.Environment) + s.orderExecutor.BindProfitStats(s.ProfitStats) + s.orderExecutor.Bind() + + // Sync position to redis on trade + s.orderExecutor.TradeCollector().OnPositionUpdate(func(position *types.Position) { + if err := s.Persistence.Sync(s); err != nil { + log.WithError(err).Errorf("can not sync state to persistence") + } + }) + + s.stopC = make(chan struct{}) + + // StrategyController + s.Status = types.StrategyStatusRunning + + s.OnSuspend(func() { + _ = s.orderExecutor.GracefulCancel(ctx) + _ = s.Persistence.Sync(s) + }) + + s.OnEmergencyStop(func() { + _ = s.orderExecutor.GracefulCancel(ctx) + // Close 100% position + _ = s.ClosePosition(ctx, fixedpoint.One) + }) + + // Setup indicators + s.setupIndicators() + + s.currentStopLossPrice = fixedpoint.Zero + s.currentTakeProfitPrice = fixedpoint.Zero + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // StrategyController + if s.Status != types.StrategyStatusRunning { + return + } + + // skip k-lines from other symbols or other intervals + if kline.Symbol != s.Symbol || kline.Interval != s.Interval { + return + } + + // Update indicators + s.updateIndicators(kline) + + // Get signals + closePrice := kline.GetClose().Float64() + openPrice := kline.GetOpen().Float64() + stSignal := s.Supertrend.GetSignal() + var demaSignal types.Direction + if closePrice > s.fastDEMA.Last() && closePrice > s.slowDEMA.Last() && !(openPrice > s.fastDEMA.Last() && openPrice > s.slowDEMA.Last()) { + demaSignal = types.DirectionUp + } else if closePrice < s.fastDEMA.Last() && closePrice < s.slowDEMA.Last() && !(openPrice < s.fastDEMA.Last() && openPrice < s.slowDEMA.Last()) { + demaSignal = types.DirectionDown + } else { + demaSignal = types.DirectionNone + } + + base := s.Position.GetBase() + baseSign := base.Sign() + + // TP/SL if there's non-dust position + if !s.Market.IsDustQuantity(base.Abs(), kline.GetClose()) { + if s.StopLossByTriggeringK && !s.currentStopLossPrice.IsZero() && ((baseSign < 0 && kline.GetClose().Compare(s.currentStopLossPrice) > 0) || (baseSign > 0 && kline.GetClose().Compare(s.currentStopLossPrice) < 0)) { + // SL by triggering Kline low + log.Infof("%s SL by triggering Kline low", s.Symbol) + bbgo.Notify("%s StopLoss by triggering the kline low", s.Symbol) + if err := s.ClosePosition(ctx, fixedpoint.One); err == nil { + s.currentStopLossPrice = fixedpoint.Zero + s.currentTakeProfitPrice = fixedpoint.Zero + } + } else if s.TakeProfitMultiplier > 0 && !s.currentTakeProfitPrice.IsZero() && ((baseSign < 0 && kline.GetClose().Compare(s.currentTakeProfitPrice) < 0) || (baseSign > 0 && kline.GetClose().Compare(s.currentTakeProfitPrice) > 0)) { + // TP by multiple of ATR + log.Infof("%s TP by multiple of ATR", s.Symbol) + 
bbgo.Notify("%s TakeProfit by multiple of ATR", s.Symbol) + if err := s.ClosePosition(ctx, fixedpoint.One); err == nil { + s.currentStopLossPrice = fixedpoint.Zero + s.currentTakeProfitPrice = fixedpoint.Zero + } + } else if s.TPSLBySignal { + // Use signals to TP/SL + log.Infof("%s TP/SL by reverse of DEMA or Supertrend", s.Symbol) + bbgo.Notify("%s TP/SL by reverse of DEMA or Supertrend", s.Symbol) + if (baseSign < 0 && (stSignal == types.DirectionUp || demaSignal == types.DirectionUp)) || (baseSign > 0 && (stSignal == types.DirectionDown || demaSignal == types.DirectionDown)) { + if err := s.ClosePosition(ctx, fixedpoint.One); err == nil { + s.currentStopLossPrice = fixedpoint.Zero + s.currentTakeProfitPrice = fixedpoint.Zero + } + } + } + } + + // Open position + var side types.SideType + if stSignal == types.DirectionUp && demaSignal == types.DirectionUp { + side = types.SideTypeBuy + if s.StopLossByTriggeringK { + s.currentStopLossPrice = kline.GetLow() + } + if s.TakeProfitMultiplier > 0 { + s.currentTakeProfitPrice = kline.GetClose().Add(fixedpoint.NewFromFloat(s.Supertrend.AverageTrueRange.Last() * s.TakeProfitMultiplier)) + } + } else if stSignal == types.DirectionDown && demaSignal == types.DirectionDown { + side = types.SideTypeSell + if s.StopLossByTriggeringK { + s.currentStopLossPrice = kline.GetHigh() + } + if s.TakeProfitMultiplier > 0 { + s.currentTakeProfitPrice = kline.GetClose().Sub(fixedpoint.NewFromFloat(s.Supertrend.AverageTrueRange.Last() * s.TakeProfitMultiplier)) + } + } + + // The default value of side is an empty string. Unless side is set by the checks above, the result of the following condition is false + if side == types.SideTypeSell || side == types.SideTypeBuy { + log.Infof("open %s position for signal %v", s.Symbol, side) + bbgo.Notify("open %s position for signal %v", s.Symbol, side) + // Close opposite position if any + if !s.Position.IsDust(kline.GetClose()) { + if (side == types.SideTypeSell && s.Position.IsLong()) || (side == types.SideTypeBuy && s.Position.IsShort()) { + log.Infof("close existing %s position before open a new position", s.Symbol) + bbgo.Notify("close existing %s position before open a new position", s.Symbol) + _ = s.ClosePosition(ctx, fixedpoint.One) + } else { + log.Infof("existing %s position has the same direction with the signal", s.Symbol) + bbgo.Notify("existing %s position has the same direction with the signal", s.Symbol) + return + } + } + + orderForm := s.generateOrderForm(side, s.calculateQuantity(kline.GetClose()), types.SideEffectTypeMarginBuy) + log.Infof("submit open position order %v", orderForm) + _, err := s.orderExecutor.SubmitOrders(ctx, orderForm) + if err != nil { + log.WithError(err).Errorf("can not place %s open position order", s.Symbol) + bbgo.Notify("can not place %s open position order", s.Symbol) + } + } + }) + + // Graceful shutdown + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + close(s.stopC) + + _ = s.orderExecutor.GracefulCancel(ctx) + }) + + return nil +} diff --git a/pkg/strategy/support/strategy.go b/pkg/strategy/support/strategy.go new file mode 100644 index 0000000000..e79eca793d --- /dev/null +++ b/pkg/strategy/support/strategy.go @@ -0,0 +1,631 @@ +package support + +import ( + "context" + "fmt" + "sync" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "support" + +const stateKey = "state-v1" + +var log 
= logrus.WithField("strategy", ID) + +var zeroiw = types.IntervalWindow{} + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type State struct { + Position *types.Position `json:"position,omitempty"` + CurrentHighestPrice *fixedpoint.Value `json:"currentHighestPrice,omitempty"` +} + +type Target struct { + ProfitPercentage fixedpoint.Value `json:"profitPercentage"` + QuantityPercentage fixedpoint.Value `json:"quantityPercentage"` + MarginOrderSideEffect types.MarginOrderSideEffectType `json:"marginOrderSideEffect"` +} + +// PercentageTargetStop is a kind of stop order by setting fixed percentage target +type PercentageTargetStop struct { + Targets []Target `json:"targets"` +} + +// GenerateOrders generates the orders from the given targets +func (stop *PercentageTargetStop) GenerateOrders(market types.Market, pos *types.Position) []types.SubmitOrder { + var price = pos.AverageCost + var quantity = pos.GetBase() + + // submit target orders + var targetOrders []types.SubmitOrder + for _, target := range stop.Targets { + targetPrice := price.Mul(fixedpoint.One.Add(target.ProfitPercentage)) + targetQuantity := quantity.Mul(target.QuantityPercentage) + targetQuoteQuantity := targetPrice.Mul(targetQuantity) + + if targetQuoteQuantity.Compare(market.MinNotional) <= 0 { + continue + } + + if targetQuantity.Compare(market.MinQuantity) <= 0 { + continue + } + + targetOrders = append(targetOrders, types.SubmitOrder{ + Symbol: market.Symbol, + Market: market, + Type: types.OrderTypeLimit, + Side: types.SideTypeSell, + Price: targetPrice, + Quantity: targetQuantity, + MarginSideEffect: target.MarginOrderSideEffect, + TimeInForce: types.TimeInForceGTC, + }) + } + + return targetOrders +} + +type TrailingStopTarget struct { + TrailingStopCallbackRatio fixedpoint.Value `json:"callbackRatio"` + MinimumProfitPercentage fixedpoint.Value `json:"minimumProfitPercentage"` +} + +type TrailingStopControl struct { + symbol string + market types.Market + marginSideEffect types.MarginOrderSideEffectType + + trailingStopCallbackRatio fixedpoint.Value + minimumProfitPercentage fixedpoint.Value + + CurrentHighestPrice fixedpoint.Value + StopOrder *types.Order +} + +func (control *TrailingStopControl) UpdateCurrentHighestPrice(p fixedpoint.Value) bool { + orig := control.CurrentHighestPrice + control.CurrentHighestPrice = fixedpoint.Max(control.CurrentHighestPrice, p) + return orig.Compare(control.CurrentHighestPrice) == 0 +} + +func (control *TrailingStopControl) IsHigherThanMin(minTargetPrice fixedpoint.Value) bool { + targetPrice := control.CurrentHighestPrice.Mul(fixedpoint.One.Sub(control.trailingStopCallbackRatio)) + + return targetPrice.Compare(minTargetPrice) >= 0 +} + +func (control *TrailingStopControl) GenerateStopOrder(quantity fixedpoint.Value) types.SubmitOrder { + targetPrice := control.CurrentHighestPrice.Mul(fixedpoint.One.Sub(control.trailingStopCallbackRatio)) + + orderForm := types.SubmitOrder{ + Symbol: control.symbol, + Market: control.market, + Side: types.SideTypeSell, + Type: types.OrderTypeStopLimit, + Quantity: quantity, + MarginSideEffect: control.marginSideEffect, + TimeInForce: types.TimeInForceGTC, + + Price: targetPrice, + StopPrice: targetPrice, + } + + return orderForm +} + +// Not implemented yet +// ResistanceStop is a kind of stop order by detecting resistance +// type ResistanceStop struct { +// Interval types.Interval `json:"interval"` +// sensitivity fixedpoint.Value `json:"sensitivity"` +// MinVolume fixedpoint.Value `json:"minVolume"` +// TakerBuyRatio 
fixedpoint.Value `json:"takerBuyRatio"` +// } + +type Strategy struct { + *bbgo.Persistence `json:"-"` + *bbgo.Environment `json:"-"` + *bbgo.Graceful `json:"-"` + + session *bbgo.ExchangeSession + + Symbol string `json:"symbol"` + Market types.Market `json:"-"` + + // Interval for checking support + Interval types.Interval `json:"interval"` + + // moving average window for checking support (support should be under the moving average line) + TriggerMovingAverage types.IntervalWindow `json:"triggerMovingAverage"` + + // LongTermMovingAverage is the second moving average line for checking support position + LongTermMovingAverage types.IntervalWindow `json:"longTermMovingAverage"` + + Quantity fixedpoint.Value `json:"quantity"` + MinVolume fixedpoint.Value `json:"minVolume"` + Sensitivity fixedpoint.Value `json:"sensitivity"` + TakerBuyRatio fixedpoint.Value `json:"takerBuyRatio"` + MarginOrderSideEffect types.MarginOrderSideEffectType `json:"marginOrderSideEffect"` + Targets []Target `json:"targets"` + + // Not implemented yet + // ResistanceStop *ResistanceStop `json:"resistanceStop"` + // + // ResistanceTakerBuyRatio fixedpoint.Value `json:"resistanceTakerBuyRatio"` + + // Min BaseAsset balance to keep + MinBaseAssetBalance fixedpoint.Value `json:"minBaseAssetBalance"` + // Max BaseAsset balance to buy + MaxBaseAssetBalance fixedpoint.Value `json:"maxBaseAssetBalance"` + MinQuoteAssetBalance fixedpoint.Value `json:"minQuoteAssetBalance"` + + ScaleQuantity *bbgo.PriceVolumeScale `json:"scaleQuantity"` + + orderExecutor *bbgo.GeneralOrderExecutor + + Position *types.Position `persistence:"position"` + ProfitStats *types.ProfitStats `persistence:"profit_stats"` + TradeStats *types.TradeStats `persistence:"trade_stats"` + CurrentHighestPrice fixedpoint.Value `persistence:"current_highest_price"` + + state *State + + triggerEMA *indicator.EWMA + longTermEMA *indicator.EWMA + + // Trailing stop + TrailingStopTarget TrailingStopTarget `json:"trailingStopTarget"` + trailingStopControl *TrailingStopControl + + // StrategyController + bbgo.StrategyController +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) Validate() error { + if s.Quantity.IsZero() && s.ScaleQuantity == nil { + return fmt.Errorf("quantity or scaleQuantity can not be zero") + } + + if s.MinVolume.IsZero() && s.Sensitivity.IsZero() { + return fmt.Errorf("either minVolume nor sensitivity can not be zero") + } + + return nil +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) + + if s.TriggerMovingAverage != zeroiw { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.TriggerMovingAverage.Interval}) + } + + if s.LongTermMovingAverage != zeroiw { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.LongTermMovingAverage.Interval}) + } +} + +func (s *Strategy) CurrentPosition() *types.Position { + return s.Position +} + +func (s *Strategy) ClosePosition(ctx context.Context, percentage fixedpoint.Value) error { + base := s.Position.GetBase() + if base.IsZero() { + return fmt.Errorf("no opened %s position", s.Position.Symbol) + } + + // make it negative + quantity := base.Mul(percentage).Abs() + side := types.SideTypeBuy + if base.Sign() > 0 { + side = types.SideTypeSell + } + + if quantity.Compare(s.Market.MinQuantity) < 0 { + return 
fmt.Errorf("order quantity %v is too small, less than %v", quantity, s.Market.MinQuantity) + } + + submitOrder := types.SubmitOrder{ + Symbol: s.Symbol, + Side: side, + Type: types.OrderTypeMarket, + Quantity: quantity, + Market: s.Market, + } + + bbgo.Notify("Submitting %s %s order to close position by %v", s.Symbol, side.String(), percentage, submitOrder) + _, err := s.orderExecutor.SubmitOrders(ctx, submitOrder) + return err +} + +func (s *Strategy) submitOrders(ctx context.Context, orderExecutor bbgo.OrderExecutor, orderForms ...types.SubmitOrder) (types.OrderSlice, error) { + return s.orderExecutor.SubmitOrders(ctx, orderForms...) +} + +var slippageModifier = fixedpoint.NewFromFloat(1.003) + +func (s *Strategy) calculateQuantity(session *bbgo.ExchangeSession, side types.SideType, closePrice fixedpoint.Value, volume fixedpoint.Value) (fixedpoint.Value, error) { + var quantity fixedpoint.Value + if s.Quantity.Sign() > 0 { + quantity = s.Quantity + } else if s.ScaleQuantity != nil { + q, err := s.ScaleQuantity.Scale(closePrice.Float64(), volume.Float64()) + if err != nil { + return fixedpoint.Zero, err + } + quantity = fixedpoint.NewFromFloat(q) + } + + baseBalance, _ := session.GetAccount().Balance(s.Market.BaseCurrency) + if side == types.SideTypeSell { + // quantity = bbgo.AdjustQuantityByMaxAmount(quantity, closePrice, quota) + if s.MinBaseAssetBalance.Sign() > 0 && + baseBalance.Total().Sub(quantity).Compare(s.MinBaseAssetBalance) < 0 { + quota := baseBalance.Available.Sub(s.MinBaseAssetBalance) + quantity = bbgo.AdjustQuantityByMaxAmount(quantity, closePrice, quota) + } + + } else if side == types.SideTypeBuy { + if s.MaxBaseAssetBalance.Sign() > 0 && + baseBalance.Total().Add(quantity).Compare(s.MaxBaseAssetBalance) > 0 { + quota := s.MaxBaseAssetBalance.Sub(baseBalance.Total()) + quantity = bbgo.AdjustQuantityByMaxAmount(quantity, closePrice, quota) + } + + quoteBalance, ok := session.GetAccount().Balance(s.Market.QuoteCurrency) + if !ok { + return fixedpoint.Zero, fmt.Errorf("quote balance %s not found", s.Market.QuoteCurrency) + } + + // for spot, we need to modify the quantity according to the quote balance + if !session.Margin { + // add 0.3% for price slippage + notional := closePrice.Mul(quantity).Mul(slippageModifier) + + if s.MinQuoteAssetBalance.Sign() > 0 && + quoteBalance.Available.Sub(notional).Compare(s.MinQuoteAssetBalance) < 0 { + log.Warnf("modifying quantity %v according to the min quote asset balance %v %s", + quantity, + quoteBalance.Available, + s.Market.QuoteCurrency) + quota := quoteBalance.Available.Sub(s.MinQuoteAssetBalance) + quantity = bbgo.AdjustQuantityByMinAmount(quantity, closePrice, quota) + } else if notional.Compare(quoteBalance.Available) > 0 { + log.Warnf("modifying quantity %v according to the quote asset balance %v %s", + quantity, + quoteBalance.Available, + s.Market.QuoteCurrency) + quantity = bbgo.AdjustQuantityByMaxAmount(quantity, closePrice, quoteBalance.Available) + } + } + } + + return quantity, nil +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + s.session = session + instanceID := s.InstanceID() + + if s.Position == nil { + s.Position = types.NewPositionFromMarket(s.Market) + } + + if s.ProfitStats == nil { + s.ProfitStats = types.NewProfitStats(s.Market) + } + + // trade stats + if s.TradeStats == nil { + s.TradeStats = &types.TradeStats{} + } + + s.orderExecutor = bbgo.NewGeneralOrderExecutor(session, s.Symbol, ID, instanceID, s.Position) + 
s.orderExecutor.BindEnvironment(s.Environment) + s.orderExecutor.BindProfitStats(s.ProfitStats) + s.orderExecutor.BindTradeStats(s.TradeStats) + s.orderExecutor.Bind() + + // StrategyController + s.Status = types.StrategyStatusRunning + + s.OnSuspend(func() { + // Cancel all order + _ = s.orderExecutor.GracefulCancel(ctx) + _ = s.Persistence.Sync(s) + }) + + s.OnEmergencyStop(func() { + // Close 100% position + percentage := fixedpoint.NewFromFloat(1.0) + if err := s.ClosePosition(context.Background(), percentage); err != nil { + errMsg := "failed to close position" + log.WithError(err).Errorf(errMsg) + bbgo.Notify(errMsg) + } + + if err := s.Suspend(); err != nil { + errMsg := "failed to suspend strategy" + log.WithError(err).Errorf(errMsg) + bbgo.Notify(errMsg) + } + }) + + // set default values + if s.Interval == "" { + s.Interval = types.Interval5m + } + + if s.Sensitivity.Sign() > 0 { + volRange, err := s.ScaleQuantity.ByVolumeRule.Range() + if err != nil { + return err + } + + scaleUp := fixedpoint.NewFromFloat(volRange[1]) + scaleLow := fixedpoint.NewFromFloat(volRange[0]) + s.MinVolume = scaleUp.Sub(scaleLow). + Mul(fixedpoint.One.Sub(s.Sensitivity)). + Add(scaleLow) + log.Infof("adjusted minimal support volume to %s according to sensitivity %s", s.MinVolume.String(), s.Sensitivity.String()) + } + + standardIndicatorSet, ok := session.StandardIndicatorSet(s.Symbol) + if !ok { + return fmt.Errorf("standardIndicatorSet is nil, symbol %s", s.Symbol) + } + + if s.TriggerMovingAverage != zeroiw { + s.triggerEMA = standardIndicatorSet.EWMA(s.TriggerMovingAverage) + } else { + s.triggerEMA = standardIndicatorSet.EWMA(types.IntervalWindow{ + Interval: s.Interval, + Window: 99, // default window + }) + } + + if s.LongTermMovingAverage != zeroiw { + s.longTermEMA = standardIndicatorSet.EWMA(s.LongTermMovingAverage) + } + + if !s.TrailingStopTarget.TrailingStopCallbackRatio.IsZero() { + s.trailingStopControl = &TrailingStopControl{ + symbol: s.Symbol, + market: s.Market, + marginSideEffect: s.MarginOrderSideEffect, + trailingStopCallbackRatio: s.TrailingStopTarget.TrailingStopCallbackRatio, + minimumProfitPercentage: s.TrailingStopTarget.MinimumProfitPercentage, + CurrentHighestPrice: s.CurrentHighestPrice, + } + } + + if !s.TrailingStopTarget.TrailingStopCallbackRatio.IsZero() { + // Update trailing stop when the position changes + s.orderExecutor.TradeCollector().OnPositionUpdate(func(position *types.Position) { + // StrategyController + if s.Status != types.StrategyStatusRunning { + return + } + + if !position.IsLong() || position.IsDust(position.AverageCost) { + return + } + + s.updateStopOrder(ctx) + }) + } + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // StrategyController + if s.Status != types.StrategyStatusRunning { + return + } + + // skip k-lines from other symbols + if kline.Symbol != s.Symbol { + return + } + if kline.Interval != s.Interval { + return + } + + closePrice := kline.GetClose() + highPrice := kline.GetHigh() + + // check our trailing stop + if s.TrailingStopTarget.TrailingStopCallbackRatio.Sign() > 0 { + if s.Position.IsLong() && !s.Position.IsDust(closePrice) { + changed := s.trailingStopControl.UpdateCurrentHighestPrice(highPrice) + if changed { + // Cancel the original order + s.updateStopOrder(ctx) + } + } + } + + // check support volume + if kline.Volume.Compare(s.MinVolume) < 0 { + return + } + + // check taker buy ratio, we need strong buy taker + if s.TakerBuyRatio.Sign() > 0 { + takerBuyRatio := 
kline.TakerBuyBaseAssetVolume.Div(kline.Volume) + takerBuyBaseVolumeThreshold := kline.Volume.Mul(s.TakerBuyRatio) + if takerBuyRatio.Compare(s.TakerBuyRatio) < 0 { + bbgo.Notify("%s: taker buy base volume %s (volume ratio %s) is less than %s (volume ratio %s)", + s.Symbol, + kline.TakerBuyBaseAssetVolume.String(), + takerBuyRatio.String(), + takerBuyBaseVolumeThreshold.String(), + kline.Volume.String(), + s.TakerBuyRatio.String(), + kline, + ) + return + } + } + + if s.longTermEMA != nil && closePrice.Float64() < s.longTermEMA.Last() { + bbgo.Notify("%s: closed price is below the long term moving average line %f, skipping this support", + s.Symbol, + s.longTermEMA.Last(), + kline, + ) + return + } + + if s.triggerEMA != nil && closePrice.Float64() > s.triggerEMA.Last() { + bbgo.Notify("%s: closed price is above the trigger moving average line %f, skipping this support", + s.Symbol, + s.triggerEMA.Last(), + kline, + ) + return + } + + if s.triggerEMA != nil && s.longTermEMA != nil { + bbgo.Notify("Found %s support: the close price %s is below trigger EMA %f and above long term EMA %f and volume %s > minimum volume %s", + s.Symbol, + closePrice.String(), + s.triggerEMA.Last(), + s.longTermEMA.Last(), + kline.Volume.String(), + s.MinVolume.String(), + kline) + } else { + bbgo.Notify("Found %s support: the close price %s and volume %s > minimum volume %s", + s.Symbol, + closePrice.String(), + kline.Volume.String(), + s.MinVolume.String(), + kline) + } + + quantity, err := s.calculateQuantity(session, types.SideTypeBuy, closePrice, kline.Volume) + if err != nil { + log.WithError(err).Errorf("%s quantity calculation error", s.Symbol) + return + } + + orderForm := types.SubmitOrder{ + Symbol: s.Symbol, + Market: s.Market, + Side: types.SideTypeBuy, + Type: types.OrderTypeMarket, + Quantity: quantity, + MarginSideEffect: s.MarginOrderSideEffect, + } + + bbgo.Notify("Submitting %s market order buy with quantity %s according to the base volume %s, taker buy base volume %s", + s.Symbol, + quantity.String(), + kline.Volume.String(), + kline.TakerBuyBaseAssetVolume.String(), + orderForm) + + if _, err := s.submitOrders(ctx, orderExecutor, orderForm); err != nil { + log.WithError(err).Error("submit order error") + return + } + + if s.TrailingStopTarget.TrailingStopCallbackRatio.IsZero() { // submit fixed target orders + var targetOrders []types.SubmitOrder + for _, target := range s.Targets { + targetPrice := closePrice.Mul(fixedpoint.One.Add(target.ProfitPercentage)) + targetQuantity := quantity.Mul(target.QuantityPercentage) + targetQuoteQuantity := targetPrice.Mul(targetQuantity) + + if targetQuoteQuantity.Compare(s.Market.MinNotional) <= 0 { + continue + } + + if targetQuantity.Compare(s.Market.MinQuantity) <= 0 { + continue + } + + targetOrders = append(targetOrders, types.SubmitOrder{ + Symbol: kline.Symbol, + Market: s.Market, + Type: types.OrderTypeLimit, + Side: types.SideTypeSell, + Price: targetPrice, + Quantity: targetQuantity, + + MarginSideEffect: target.MarginOrderSideEffect, + TimeInForce: types.TimeInForceGTC, + }) + } + + _, err = s.orderExecutor.SubmitOrders(ctx, targetOrders...) 
+ if err != nil { + bbgo.Notify("submit %s profit trailing stop order error: %s", s.Symbol, err.Error()) + } + } + }) + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + + // Cancel trailing stop order + if s.TrailingStopTarget.TrailingStopCallbackRatio.Sign() > 0 { + _ = s.orderExecutor.GracefulCancel(ctx) + } + }) + + return nil +} + +func (s *Strategy) updateStopOrder(ctx context.Context) { + // cancel the original stop order + if s.trailingStopControl.StopOrder != nil { + if err := s.session.Exchange.CancelOrders(ctx, *s.trailingStopControl.StopOrder); err != nil { + log.WithError(err).Error("cancel order error") + } + s.trailingStopControl.StopOrder = nil + s.orderExecutor.TradeCollector().Process() + } + + // Calculate minimum target price + var minTargetPrice = fixedpoint.Zero + if s.trailingStopControl.minimumProfitPercentage.Sign() > 0 { + minTargetPrice = s.Position.AverageCost.Mul(fixedpoint.One.Add(s.trailingStopControl.minimumProfitPercentage)) + } + + // Place new order if the target price is higher than the minimum target price + if s.trailingStopControl.IsHigherThanMin(minTargetPrice) { + orderForm := s.trailingStopControl.GenerateStopOrder(s.Position.Base) + orders, err := s.orderExecutor.SubmitOrders(ctx, orderForm) + if err != nil { + bbgo.Notify("failed to submit the trailing stop order on %s", s.Symbol) + log.WithError(err).Error("submit profit trailing stop order error") + } + + if len(orders) == 0 { + log.Error("unexpected error: len(createdOrders) = 0") + return + } + + s.trailingStopControl.StopOrder = &orders[0] + } +} diff --git a/pkg/strategy/swing/strategy.go b/pkg/strategy/swing/strategy.go index 2d3aeb6761..da85591ce2 100644 --- a/pkg/strategy/swing/strategy.go +++ b/pkg/strategy/swing/strategy.go @@ -3,15 +3,17 @@ package swing import ( "context" "fmt" - "math" log "github.com/sirupsen/logrus" "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/types" ) -// The indicators (SMA and EWMA) that we want to use are returning float64 data. +const ID = "swing" + +// Float64Indicator is the indicators (SMA and EWMA) that we want to use are returning float64 data. type Float64Indicator interface { Last() float64 } @@ -20,14 +22,10 @@ func init() { // Register the pointer of the strategy struct, // so that bbgo knows what struct to be used to unmarshal the configs (YAML or JSON) // Note: built-in strategies need to imported manually in the bbgo cmd package. - bbgo.RegisterStrategy("swing", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) } type Strategy struct { - // The notification system will be injected into the strategy automatically. - // This field will be injected automatically since it's a single exchange strategy. - *bbgo.Notifiability - // OrderExecutor is an interface for submitting order. // This field will be injected automatically since it's a single exchange strategy. bbgo.OrderExecutor @@ -53,14 +51,14 @@ type Strategy struct { // Interval is the interval of the kline channel we want to subscribe, // the kline event will trigger the strategy to check if we need to submit order. - Interval string `json:"interval"` + Interval types.Interval `json:"interval"` // MinChange filters out the k-lines with small changes. so that our strategy will only be triggered // in specific events. - MinChange float64 `json:"minChange"` + MinChange fixedpoint.Value `json:"minChange"` // BaseQuantity is the base quantity of the submit order. 
for both BUY and SELL, market order will be used. - BaseQuantity float64 `json:"baseQuantity"` + BaseQuantity fixedpoint.Value `json:"baseQuantity"` // MovingAverageType is the moving average indicator type that we want to use, // it could be SMA or EWMA @@ -76,6 +74,10 @@ type Strategy struct { MovingAverageWindow int `json:"movingAverageWindow"` } +func (s *Strategy) ID() string { + return ID +} + func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: s.Interval}) } @@ -96,8 +98,7 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se } - // session.Stream.OnKLineClosed - session.Stream.OnKLineClosed(func(kline types.KLine) { + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { // skip k-lines from other symbols if kline.Symbol != s.Symbol { return @@ -111,20 +112,20 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se } // skip if the change is not above the minChange - if math.Abs(kline.GetChange()) < s.MinChange { + if kline.GetChange().Abs().Compare(s.MinChange) < 0 { return } closePrice := kline.Close - changePercentage := kline.GetChange() / kline.Open - quantity := s.BaseQuantity * (1.0 + math.Abs(changePercentage)) + changePercentage := kline.GetChange().Div(kline.Open) + quantity := s.BaseQuantity.Mul(fixedpoint.One.Add(changePercentage.Abs())) trend := kline.Direction() switch trend { case types.DirectionUp: // if it goes up and it's above the moving average price, then we sell - if closePrice > movingAveragePrice { - s.notify(":chart_with_upwards_trend: closePrice %f is above movingAveragePrice %f, submitting SELL order", closePrice, movingAveragePrice) + if closePrice.Float64() > movingAveragePrice { + s.notify(":chart_with_upwards_trend: closePrice %v is above movingAveragePrice %v, submitting SELL order", closePrice, movingAveragePrice) _, err := orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ Symbol: s.Symbol, @@ -139,8 +140,8 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se } case types.DirectionDown: // if it goes down and it's below the moving average price, then we buy - if closePrice < movingAveragePrice { - s.notify(":chart_with_downwards_trend: closePrice %f is below movingAveragePrice %f, submitting BUY order", closePrice, movingAveragePrice) + if closePrice.Float64() < movingAveragePrice { + s.notify(":chart_with_downwards_trend: closePrice %v is below movingAveragePrice %v, submitting BUY order", closePrice, movingAveragePrice) _, err := orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ Symbol: s.Symbol, @@ -159,9 +160,9 @@ func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, se } func (s *Strategy) notify(format string, args ...interface{}) { - if channel, ok := s.RouteSymbol(s.Symbol); ok { - s.NotifyTo(channel, format, args...) + if channel, ok := bbgo.Notification.RouteSymbol(s.Symbol); ok { + bbgo.NotifyTo(channel, format, args...) } else { - s.Notify(format, args...) + bbgo.Notify(format, args...) 
} } diff --git a/pkg/strategy/techsignal/strategy.go b/pkg/strategy/techsignal/strategy.go new file mode 100644 index 0000000000..e95c970d1d --- /dev/null +++ b/pkg/strategy/techsignal/strategy.go @@ -0,0 +1,225 @@ +package techsignal + +import ( + "context" + "errors" + "fmt" + "strings" + "time" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/exchange/binance" + "github.com/c9s/bbgo/pkg/fixedpoint" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "techsignal" + +var log = logrus.WithField("strategy", ID) + +func init() { + // Register the pointer of the strategy struct, + // so that bbgo knows what struct to be used to unmarshal the configs (YAML or JSON) + // Note: built-in strategies need to imported manually in the bbgo cmd package. + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + // These fields will be filled from the config file (it translates YAML to JSON) + Symbol string `json:"symbol"` + Market types.Market `json:"-"` + + FundingRate *struct { + High fixedpoint.Value `json:"high"` + Neutral fixedpoint.Value `json:"neutral"` + DiffThreshold fixedpoint.Value `json:"diffThreshold"` + } `json:"fundingRate"` + + SupportDetection []struct { + Interval types.Interval `json:"interval"` + + // MovingAverageType is the moving average indicator type that we want to use, + // it could be SMA or EWMA + MovingAverageType string `json:"movingAverageType"` + + // MovingAverageInterval is the interval of k-lines for the moving average indicator to calculate, + // it could be "1m", "5m", "1h" and so on. note that, the moving averages are calculated from + // the k-line data we subscribed + MovingAverageInterval types.Interval `json:"movingAverageInterval"` + + // MovingAverageWindow is the number of the window size of the moving average indicator. + // The number of k-lines in the window. generally used window sizes are 7, 25 and 99 in the TradingView. + MovingAverageWindow int `json:"movingAverageWindow"` + + MinVolume fixedpoint.Value `json:"minVolume"` + + MinQuoteVolume fixedpoint.Value `json:"minQuoteVolume"` + } `json:"supportDetection"` +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + // session.Subscribe(types.BookChannel, s.Symbol, types.SubscribeOptions{}) + for _, detection := range s.SupportDetection { + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + Interval: detection.Interval, + }) + + session.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{ + Interval: detection.MovingAverageInterval, + }) + } +} + +func (s *Strategy) Validate() error { + if len(s.Symbol) == 0 { + return errors.New("symbol is required") + } + + return nil +} + +func (s *Strategy) listenToFundingRate(ctx context.Context, exchange *binance.Exchange) { + var previousIndex, fundingRate24HoursLowIndex *types.PremiumIndex + + fundingRateTicker := time.NewTicker(1 * time.Hour) + defer fundingRateTicker.Stop() + for { + select { + + case <-ctx.Done(): + return + + case <-fundingRateTicker.C: + index, err := exchange.QueryPremiumIndex(ctx, s.Symbol) + if err != nil { + log.WithError(err).Error("can not query last funding rate") + continue + } + + fundingRate := index.LastFundingRate + + if fundingRate.Compare(s.FundingRate.High) >= 0 { + bbgo.Notify("%s funding rate %s is too high! 
threshold %s", + s.Symbol, + fundingRate.Percentage(), + s.FundingRate.High.Percentage(), + ) + } else { + if previousIndex != nil { + if s.FundingRate.DiffThreshold.IsZero() { + // 0.6% + s.FundingRate.DiffThreshold = fixedpoint.NewFromFloat(0.006 * 0.01) + } + + diff := fundingRate.Sub(previousIndex.LastFundingRate) + if diff.Abs().Compare(s.FundingRate.DiffThreshold) > 0 { + bbgo.Notify("%s funding rate changed %s, current funding rate %s", + s.Symbol, + diff.SignedPercentage(), + fundingRate.Percentage(), + ) + } + } + } + + previousIndex = index + if fundingRate24HoursLowIndex != nil { + if fundingRate24HoursLowIndex.Time.Before(time.Now().Add(24 * time.Hour)) { + fundingRate24HoursLowIndex = index + } + if fundingRate.Compare(fundingRate24HoursLowIndex.LastFundingRate) < 0 { + fundingRate24HoursLowIndex = index + } + } else { + fundingRate24HoursLowIndex = index + } + } + } +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + standardIndicatorSet, ok := session.StandardIndicatorSet(s.Symbol) + if !ok { + return fmt.Errorf("standardIndicatorSet is nil, symbol %s", s.Symbol) + } + + if s.FundingRate != nil { + if binanceExchange, ok := session.Exchange.(*binance.Exchange); ok { + go s.listenToFundingRate(ctx, binanceExchange) + } else { + log.Error("exchange does not support funding rate api") + } + } + + session.MarketDataStream.OnKLineClosed(func(kline types.KLine) { + // skip k-lines from other symbols + if kline.Symbol != s.Symbol { + return + } + + for _, detection := range s.SupportDetection { + if kline.Interval != detection.Interval { + continue + } + + closePrice := kline.GetClose() + + var ma types.Float64Indicator + + switch strings.ToLower(detection.MovingAverageType) { + case "sma": + ma = standardIndicatorSet.SMA(types.IntervalWindow{ + Interval: detection.MovingAverageInterval, + Window: detection.MovingAverageWindow, + }) + case "ema", "ewma": + ma = standardIndicatorSet.EWMA(types.IntervalWindow{ + Interval: detection.MovingAverageInterval, + Window: detection.MovingAverageWindow, + }) + default: + ma = standardIndicatorSet.EWMA(types.IntervalWindow{ + Interval: detection.MovingAverageInterval, + Window: detection.MovingAverageWindow, + }) + } + + var lastMA = ma.Last() + + // skip if the closed price is above the moving average + if closePrice.Float64() > lastMA { + log.Infof("skip %s support closed price %f > last ma %f", s.Symbol, closePrice.Float64(), lastMA) + return + } + + prettyBaseVolume := s.Market.BaseCurrencyFormatter() + prettyQuoteVolume := s.Market.QuoteCurrencyFormatter() + + if detection.MinVolume.Sign() > 0 && kline.Volume.Compare(detection.MinVolume) > 0 { + bbgo.Notify("Detected %s %s support base volume %s > min base volume %s, quote volume %s", + s.Symbol, detection.Interval.String(), + prettyBaseVolume.FormatMoney(kline.Volume.Trunc()), + prettyBaseVolume.FormatMoney(detection.MinVolume.Trunc()), + prettyQuoteVolume.FormatMoney(kline.QuoteVolume.Trunc()), + ) + bbgo.Notify(kline) + } else if detection.MinQuoteVolume.Sign() > 0 && kline.QuoteVolume.Compare(detection.MinQuoteVolume) > 0 { + bbgo.Notify("Detected %s %s support quote volume %s > min quote volume %s, base volume %s", + s.Symbol, detection.Interval.String(), + prettyQuoteVolume.FormatMoney(kline.QuoteVolume.Trunc()), + prettyQuoteVolume.FormatMoney(detection.MinQuoteVolume.Trunc()), + prettyBaseVolume.FormatMoney(kline.Volume.Trunc()), + ) + bbgo.Notify(kline) + } + } + }) + return nil +} diff --git 
a/pkg/strategy/wall/strategy.go b/pkg/strategy/wall/strategy.go new file mode 100644 index 0000000000..824cc28cf9 --- /dev/null +++ b/pkg/strategy/wall/strategy.go @@ -0,0 +1,405 @@ +package wall + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/c9s/bbgo/pkg/util" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +const ID = "wall" + +const stateKey = "state-v1" + +var defaultFeeRate = fixedpoint.NewFromFloat(0.001) +var two = fixedpoint.NewFromInt(2) + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + + Environment *bbgo.Environment + StandardIndicatorSet *bbgo.StandardIndicatorSet + Market types.Market + + // Symbol is the market symbol you want to trade + Symbol string `json:"symbol"` + + Side types.SideType `json:"side"` + + // Interval is how long do you want to update your order price and quantity + Interval types.Interval `json:"interval"` + + FixedPrice fixedpoint.Value `json:"fixedPrice"` + + bbgo.QuantityOrAmount + + NumLayers int `json:"numLayers"` + + // LayerSpread is the price spread between each layer + LayerSpread fixedpoint.Value `json:"layerSpread"` + + // QuantityScale helps user to define the quantity by layer scale + QuantityScale *bbgo.LayerScale `json:"quantityScale,omitempty"` + + AdjustmentMinSpread fixedpoint.Value `json:"adjustmentMinSpread"` + AdjustmentQuantity fixedpoint.Value `json:"adjustmentQuantity"` + + session *bbgo.ExchangeSession + + // persistence fields + Position *types.Position `json:"position,omitempty" persistence:"position"` + ProfitStats *types.ProfitStats `json:"profitStats,omitempty" persistence:"profit_stats"` + + activeAdjustmentOrders *bbgo.ActiveOrderBook + activeWallOrders *bbgo.ActiveOrderBook + orderStore *bbgo.OrderStore + tradeCollector *bbgo.TradeCollector + + groupID uint32 + + stopC chan struct{} +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { + session.Subscribe(types.BookChannel, s.Symbol, types.SubscribeOptions{ + Depth: types.DepthLevelFull, + }) +} + +func (s *Strategy) Validate() error { + if len(s.Symbol) == 0 { + return errors.New("symbol is required") + } + + if len(s.Side) == 0 { + return errors.New("side is required") + } + + if s.FixedPrice.IsZero() { + return errors.New("fixedPrice can not be zero") + } + + return nil +} + +func (s *Strategy) CurrentPosition() *types.Position { + return s.Position +} + +func (s *Strategy) placeAdjustmentOrders(ctx context.Context, orderExecutor bbgo.OrderExecutor) error { + var submitOrders []types.SubmitOrder + // position adjustment orders + base := s.Position.GetBase() + if base.IsZero() { + return nil + } + + ticker, err := s.session.Exchange.QueryTicker(ctx, s.Symbol) + if err != nil { + return err + } + + if s.Market.IsDustQuantity(base, ticker.Last) { + return nil + } + + switch s.Side { + case types.SideTypeBuy: + askPrice := ticker.Sell.Mul(s.AdjustmentMinSpread.Add(fixedpoint.One)) + + if s.Position.AverageCost.Compare(askPrice) <= 0 { + return nil + } + + if base.Sign() < 0 { + return nil + } + + quantity := base.Abs() + if quantity.Compare(s.AdjustmentQuantity) >= 0 { + quantity = s.AdjustmentQuantity + } + + submitOrders = append(submitOrders, 
types.SubmitOrder{ + Symbol: s.Symbol, + Side: s.Side.Reverse(), + Type: types.OrderTypeLimitMaker, + Price: askPrice, + Quantity: quantity, + Market: s.Market, + GroupID: s.groupID, + }) + + case types.SideTypeSell: + bidPrice := ticker.Sell.Mul(fixedpoint.One.Sub(s.AdjustmentMinSpread)) + + if s.Position.AverageCost.Compare(bidPrice) >= 0 { + return nil + } + + if base.Sign() > 0 { + return nil + } + + quantity := base.Abs() + if quantity.Compare(s.AdjustmentQuantity) >= 0 { + quantity = s.AdjustmentQuantity + } + + submitOrders = append(submitOrders, types.SubmitOrder{ + Symbol: s.Symbol, + Side: s.Side.Reverse(), + Type: types.OrderTypeLimitMaker, + Price: bidPrice, + Quantity: quantity, + Market: s.Market, + GroupID: s.groupID, + }) + } + + // condition for lower the average cost + if len(submitOrders) == 0 { + return nil + } + + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrders...) + if err != nil { + return err + } + + s.orderStore.Add(createdOrders...) + s.activeAdjustmentOrders.Add(createdOrders...) + return nil +} + +func (s *Strategy) placeWallOrders(ctx context.Context, orderExecutor bbgo.OrderExecutor) error { + var submitOrders []types.SubmitOrder + var startPrice = s.FixedPrice + for i := 0; i < s.NumLayers; i++ { + var price = startPrice + var quantity fixedpoint.Value + if s.QuantityOrAmount.IsSet() { + quantity = s.QuantityOrAmount.CalculateQuantity(price) + } else if s.QuantityScale != nil { + qf, err := s.QuantityScale.Scale(i + 1) + if err != nil { + return err + } + quantity = fixedpoint.NewFromFloat(qf) + } + + order := types.SubmitOrder{ + Symbol: s.Symbol, + Side: s.Side, + Type: types.OrderTypeLimitMaker, + Price: price, + Quantity: quantity, + Market: s.Market, + GroupID: s.groupID, + } + submitOrders = append(submitOrders, order) + switch s.Side { + case types.SideTypeSell: + startPrice = startPrice.Add(s.LayerSpread) + + case types.SideTypeBuy: + startPrice = startPrice.Sub(s.LayerSpread) + + } + } + + // condition for lower the average cost + if len(submitOrders) == 0 { + return nil + } + + createdOrders, err := orderExecutor.SubmitOrders(ctx, submitOrders...) + if err != nil { + return err + } + + s.orderStore.Add(createdOrders...) + s.activeWallOrders.Add(createdOrders...) 
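placeWallOrders above emits one limit-maker order per layer, stepping the price away from FixedPrice by LayerSpread on every layer: upward for a sell wall, downward for a buy wall. Pulled out of the loop for clarity, the price schedule looks like the sketch below (helper name is illustrative; it assumes the fixedpoint and types packages already imported by this file).

// layerPrices returns one price per wall layer: a sell wall climbs away from the
// fixed price, a buy wall descends from it.
func layerPrices(fixedPrice, layerSpread fixedpoint.Value, side types.SideType, numLayers int) []fixedpoint.Value {
	prices := make([]fixedpoint.Value, 0, numLayers)
	price := fixedPrice
	for i := 0; i < numLayers; i++ {
		prices = append(prices, price)
		switch side {
		case types.SideTypeSell:
			price = price.Add(layerSpread)
		case types.SideTypeBuy:
			price = price.Sub(layerSpread)
		}
	}
	return prices
}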
+ return err +} + +func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { + // initial required information + s.session = session + + // calculate group id for orders + instanceID := s.InstanceID() + s.groupID = util.FNV32(instanceID) + + // If position is nil, we need to allocate a new position for calculation + if s.Position == nil { + s.Position = types.NewPositionFromMarket(s.Market) + } + + if s.ProfitStats == nil { + s.ProfitStats = types.NewProfitStats(s.Market) + } + + // Always update the position fields + s.Position.Strategy = ID + s.Position.StrategyInstanceID = instanceID + + s.stopC = make(chan struct{}) + + s.activeWallOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeWallOrders.BindStream(session.UserDataStream) + + s.activeAdjustmentOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeAdjustmentOrders.BindStream(session.UserDataStream) + + s.orderStore = bbgo.NewOrderStore(s.Symbol) + s.orderStore.BindStream(session.UserDataStream) + + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.Position, s.orderStore) + + s.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + bbgo.Notify(trade) + s.ProfitStats.AddTrade(trade) + + if profit.Compare(fixedpoint.Zero) == 0 { + s.Environment.RecordPosition(s.Position, trade, nil) + } else { + log.Infof("%s generated profit: %v", s.Symbol, profit) + p := s.Position.NewProfit(trade, profit, netProfit) + p.Strategy = ID + p.StrategyInstanceID = instanceID + bbgo.Notify(&p) + + s.ProfitStats.AddProfit(p) + bbgo.Notify(&s.ProfitStats) + + s.Environment.RecordPosition(s.Position, trade, &p) + } + }) + + s.tradeCollector.OnPositionUpdate(func(position *types.Position) { + log.Infof("position changed: %s", s.Position) + bbgo.Notify(s.Position) + }) + + s.tradeCollector.BindStream(session.UserDataStream) + + session.UserDataStream.OnStart(func() { + if err := s.placeWallOrders(ctx, orderExecutor); err != nil { + log.WithError(err).Errorf("can not place order") + } + }) + + s.activeAdjustmentOrders.OnFilled(func(o types.Order) { + if err := s.activeAdjustmentOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + // check if there is a canceled order had partially filled. + s.tradeCollector.Process() + + if err := s.placeAdjustmentOrders(ctx, orderExecutor); err != nil { + log.WithError(err).Errorf("can not place order") + } + }) + + s.activeWallOrders.OnFilled(func(o types.Order) { + if err := s.activeWallOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + // check if there is a canceled order had partially filled. + s.tradeCollector.Process() + + if err := s.placeWallOrders(ctx, orderExecutor); err != nil { + log.WithError(err).Errorf("can not place order") + } + + if err := s.activeAdjustmentOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + // check if there is a canceled order had partially filled. 
+ s.tradeCollector.Process() + + if err := s.placeAdjustmentOrders(ctx, orderExecutor); err != nil { + log.WithError(err).Errorf("can not place order") + } + }) + + ticker := time.NewTicker(s.Interval.Duration()) + go func() { + defer ticker.Stop() + for { + select { + case <-ctx.Done(): + return + + case <-ticker.C: + orders := s.activeWallOrders.Orders() + if anyOrderFilled(orders) { + if err := s.activeWallOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + // check if there is a canceled order had partially filled. + s.tradeCollector.Process() + + if err := s.placeWallOrders(ctx, orderExecutor); err != nil { + log.WithError(err).Errorf("can not place order") + } + } + } + } + }() + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + close(s.stopC) + + if err := s.activeWallOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + if err := s.activeAdjustmentOrders.GracefulCancel(ctx, s.session.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel order error") + } + + s.tradeCollector.Process() + }) + + return nil +} + +func anyOrderFilled(orders []types.Order) bool { + for _, o := range orders { + if o.ExecutedQuantity.Sign() > 0 { + return true + } + } + return false +} diff --git a/pkg/strategy/xbalance/strategy.go b/pkg/strategy/xbalance/strategy.go new file mode 100644 index 0000000000..4ac0e851b6 --- /dev/null +++ b/pkg/strategy/xbalance/strategy.go @@ -0,0 +1,405 @@ +package xbalance + +import ( + "context" + "encoding/json" + "fmt" + "sync" + "time" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "github.com/slack-go/slack" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" +) + +const ID = "xbalance" + +const stateKey = "state-v1" + +var priceFixer = fixedpoint.NewFromFloat(0.99) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type State struct { + Asset string `json:"asset"` + DailyNumberOfTransfers int `json:"dailyNumberOfTransfers,omitempty"` + DailyAmountOfTransfers fixedpoint.Value `json:"dailyAmountOfTransfers,omitempty"` + Since int64 `json:"since"` +} + +func (s *State) IsOver24Hours() bool { + return time.Since(time.Unix(s.Since, 0)) >= 24*time.Hour +} + +func (s *State) PlainText() string { + return util.Render(`{{ .Asset }} transfer stats: +daily number of transfers: {{ .DailyNumberOfTransfers }} +daily amount of transfers {{ .DailyAmountOfTransfers.Float64 }}`, s) +} + +func (s *State) SlackAttachment() slack.Attachment { + return slack.Attachment{ + // Pretext: "", + // Text: text, + Title: s.Asset + " Transfer States", + Fields: []slack.AttachmentField{ + {Title: "Total Number of Transfers", Value: fmt.Sprintf("%d", s.DailyNumberOfTransfers), Short: true}, + {Title: "Total Amount of Transfers", Value: util.FormatFloat(s.DailyAmountOfTransfers.Float64(), 4), Short: true}, + }, + Footer: util.Render("Since {{ . 
}}", time.Unix(s.Since, 0).Format(time.RFC822)), + } +} + +func (s *State) Reset() { + var beginningOfTheDay = util.BeginningOfTheDay(time.Now().Local()) + *s = State{ + DailyNumberOfTransfers: 0, + DailyAmountOfTransfers: fixedpoint.Zero, + Since: beginningOfTheDay.Unix(), + } +} + +type WithdrawalRequest struct { + FromSession string `json:"fromSession"` + ToSession string `json:"toSession"` + Asset string `json:"asset"` + Amount fixedpoint.Value `json:"amount"` +} + +func (r *WithdrawalRequest) String() string { + return fmt.Sprintf("WITHDRAWAL REQUEST: sending %s %s from %s -> %s", + r.Amount.FormatString(4), + r.Asset, + r.FromSession, + r.ToSession, + ) +} + +func (r *WithdrawalRequest) PlainText() string { + return fmt.Sprintf("Withdraw request: sending %s %s from %s -> %s", + r.Amount.FormatString(4), + r.Asset, + r.FromSession, + r.ToSession, + ) +} + +func (r *WithdrawalRequest) SlackAttachment() slack.Attachment { + var color = "#DC143C" + title := util.Render(`Withdraw Request {{ .Asset }}`, r) + return slack.Attachment{ + // Pretext: "", + // Text: text, + Title: title, + Color: color, + Fields: []slack.AttachmentField{ + {Title: "Asset", Value: r.Asset, Short: true}, + {Title: "Amount", Value: r.Amount.FormatString(4), Short: true}, + {Title: "From", Value: r.FromSession}, + {Title: "To", Value: r.ToSession}, + }, + Footer: util.Render("Time {{ . }}", time.Now().Format(time.RFC822)), + // FooterIcon: "", + } +} + +type Address struct { + Address string `json:"address"` + AddressTag string `json:"addressTag"` + Network string `json:"network"` + ForeignFee fixedpoint.Value `json:"foreignFee"` +} + +func (a *Address) UnmarshalJSON(body []byte) error { + var arg interface{} + err := json.Unmarshal(body, &arg) + if err != nil { + return err + } + + switch argT := arg.(type) { + case string: + a.Address = argT + return nil + } + + type addressTemplate Address + return json.Unmarshal(body, (*addressTemplate)(a)) +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + + Interval types.Duration `json:"interval"` + + Addresses map[string]Address `json:"addresses"` + + MaxDailyNumberOfTransfer int `json:"maxDailyNumberOfTransfer"` + MaxDailyAmountOfTransfer fixedpoint.Value `json:"maxDailyAmountOfTransfer"` + + CheckOnStart bool `json:"checkOnStart"` + + Asset string `json:"asset"` + + // Low is the low balance level for triggering transfer + Low fixedpoint.Value `json:"low"` + + // Middle is the middle balance level used for re-fill asset + Middle fixedpoint.Value `json:"middle"` + + Verbose bool `json:"verbose"` + + state *State +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) CrossSubscribe(sessions map[string]*bbgo.ExchangeSession) {} + +func (s *Strategy) checkBalance(ctx context.Context, sessions map[string]*bbgo.ExchangeSession) { + if s.Verbose { + bbgo.Notify("📝 Checking %s low balance level exchange session...", s.Asset) + } + + var total fixedpoint.Value + for _, session := range sessions { + if b, ok := session.GetAccount().Balance(s.Asset); ok { + total = total.Add(b.Total()) + } + } + + lowLevelSession, lowLevelBalance, err := s.findLowBalanceLevelSession(sessions) + if err != nil { + bbgo.Notify("Can not find low balance level session: %s", err.Error()) + log.WithError(err).Errorf("Can not find low balance level session") + return + } + + if lowLevelSession == nil { + if s.Verbose { + bbgo.Notify("✅ All %s balances are looking good, total value: %v", s.Asset, total) + } + return + } + + bbgo.Notify("⚠️ Found low level %s balance from 
session %s: %v", s.Asset, lowLevelSession.Name, lowLevelBalance) + + middle := s.Middle + if middle.IsZero() { + middle = total.Div(fixedpoint.NewFromInt(int64(len(sessions)))).Mul(priceFixer) + bbgo.Notify("Total value %v %s, setting middle to %v", total, s.Asset, middle) + } + + requiredAmount := middle.Sub(lowLevelBalance.Available) + + bbgo.Notify("Need %v %s to satisfy the middle balance level %v", requiredAmount, s.Asset, middle) + + fromSession, _, err := s.findHighestBalanceLevelSession(sessions, requiredAmount) + if err != nil || fromSession == nil { + bbgo.Notify("Can not find session with enough balance") + log.WithError(err).Errorf("can not find session with enough balance") + return + } + + withdrawalService, ok := fromSession.Exchange.(types.ExchangeWithdrawalService) + if !ok { + log.Errorf("exchange %s does not implement withdrawal service, we can not withdrawal", fromSession.ExchangeName) + return + } + + if !fromSession.Withdrawal { + bbgo.Notify("The withdrawal function exchange session %s is not enabled", fromSession.Name) + log.Errorf("The withdrawal function of exchange session %s is not enabled", fromSession.Name) + return + } + + toAddress, ok := s.Addresses[lowLevelSession.Name] + if !ok { + log.Errorf("%s address of session %s not found", s.Asset, lowLevelSession.Name) + bbgo.Notify("%s address of session %s not found", s.Asset, lowLevelSession.Name) + return + } + + if toAddress.ForeignFee.Sign() > 0 { + requiredAmount = requiredAmount.Add(toAddress.ForeignFee) + } + + if s.state != nil { + if s.MaxDailyNumberOfTransfer > 0 { + if s.state.DailyNumberOfTransfers >= s.MaxDailyNumberOfTransfer { + bbgo.Notify("⚠️ Exceeded %s max daily number of transfers %d (current %d), skipping transfer...", + s.Asset, + s.MaxDailyNumberOfTransfer, + s.state.DailyNumberOfTransfers) + return + } + } + + if s.MaxDailyAmountOfTransfer.Sign() > 0 { + if s.state.DailyAmountOfTransfers.Compare(s.MaxDailyAmountOfTransfer) >= 0 { + bbgo.Notify("⚠️ Exceeded %s max daily amount of transfers %v (current %v), skipping transfer...", + s.Asset, + s.MaxDailyAmountOfTransfer, + s.state.DailyAmountOfTransfers) + return + } + } + } + + bbgo.Notify(&WithdrawalRequest{ + FromSession: fromSession.Name, + ToSession: lowLevelSession.Name, + Asset: s.Asset, + Amount: requiredAmount, + }) + + if err := withdrawalService.Withdraw(ctx, s.Asset, requiredAmount, toAddress.Address, &types.WithdrawalOptions{ + Network: toAddress.Network, + AddressTag: toAddress.AddressTag, + }); err != nil { + log.WithError(err).Errorf("withdrawal failed") + bbgo.Notify("withdrawal request failed, error: %v", err) + return + } + + bbgo.Notify("%s withdrawal request sent", s.Asset) + + if s.state != nil { + if s.state.IsOver24Hours() { + s.state.Reset() + } + + s.state.DailyNumberOfTransfers += 1 + s.state.DailyAmountOfTransfers = s.state.DailyAmountOfTransfers.Add(requiredAmount) + s.SaveState() + } +} + +func (s *Strategy) findHighestBalanceLevelSession(sessions map[string]*bbgo.ExchangeSession, requiredAmount fixedpoint.Value) (*bbgo.ExchangeSession, types.Balance, error) { + var balance types.Balance + var maxBalanceLevel = fixedpoint.Zero + var maxBalanceSession *bbgo.ExchangeSession = nil + for sessionID := range s.Addresses { + session, ok := sessions[sessionID] + if !ok { + return nil, balance, fmt.Errorf("session %s does not exist", sessionID) + } + + if b, ok := session.GetAccount().Balance(s.Asset); ok { + if b.Available.Sub(requiredAmount).Compare(s.Low) > 0 && b.Available.Compare(maxBalanceLevel) > 0 { + 
maxBalanceLevel = b.Available + maxBalanceSession = session + balance = b + } + } + } + + return maxBalanceSession, balance, nil +} + +func (s *Strategy) findLowBalanceLevelSession(sessions map[string]*bbgo.ExchangeSession) (*bbgo.ExchangeSession, types.Balance, error) { + var balance types.Balance + for sessionID := range s.Addresses { + session, ok := sessions[sessionID] + if !ok { + return nil, balance, fmt.Errorf("session %s does not exist", sessionID) + } + + balance, ok = session.GetAccount().Balance(s.Asset) + if ok { + if balance.Available.Compare(s.Low) <= 0 { + return session, balance, nil + } + } + } + + return nil, balance, nil +} + +func (s *Strategy) SaveState() { + if err := s.Persistence.Save(s.state, ID, s.Asset, stateKey); err != nil { + log.WithError(err).Errorf("can not save state: %+v", s.state) + } else { + log.Infof("%s %s state is saved: %+v", ID, s.Asset, s.state) + bbgo.Notify("%s %s state is saved", ID, s.Asset, s.state) + } +} + +func (s *Strategy) newDefaultState() *State { + return &State{ + Asset: s.Asset, + DailyNumberOfTransfers: 0, + DailyAmountOfTransfers: fixedpoint.Zero, + } +} + +func (s *Strategy) LoadState() error { + var state State + if err := s.Persistence.Load(&state, ID, s.Asset, stateKey); err != nil { + if err != service.ErrPersistenceNotExists { + return err + } + + s.state = s.newDefaultState() + s.state.Reset() + } else { + // we loaded it successfully + s.state = &state + + // update Asset name for legacy caches + s.state.Asset = s.Asset + + log.Infof("%s %s state is restored: %+v", ID, s.Asset, s.state) + bbgo.Notify("%s %s state is restored", ID, s.Asset, s.state) + } + + return nil +} + +func (s *Strategy) CrossRun(ctx context.Context, _ bbgo.OrderExecutionRouter, sessions map[string]*bbgo.ExchangeSession) error { + if s.Interval == 0 { + return errors.New("interval can not be zero") + } + + if err := s.LoadState(); err != nil { + return err + } + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + + s.SaveState() + }) + + if s.CheckOnStart { + s.checkBalance(ctx, sessions) + } + + go func() { + ticker := time.NewTicker(util.MillisecondsJitter(s.Interval.Duration(), 1000)) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return + + case <-ticker.C: + s.checkBalance(ctx, sessions) + } + } + }() + + return nil +} diff --git a/pkg/strategy/xbalance/strategy_test.go b/pkg/strategy/xbalance/strategy_test.go new file mode 100644 index 0000000000..22ca3341da --- /dev/null +++ b/pkg/strategy/xbalance/strategy_test.go @@ -0,0 +1,19 @@ +package xbalance + +import ( + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/stretchr/testify/assert" +) + +func TestState_PlainText(t *testing.T) { + var state = State{ + Asset: "USDT", + DailyNumberOfTransfers: 1, + DailyAmountOfTransfers: fixedpoint.NewFromFloat(1000.0), + Since: 0, + } + + assert.Equal(t, "USDT transfer stats:\ndaily number of transfers: 1\ndaily amount of transfers 1000", state.PlainText()) +} diff --git a/pkg/strategy/xgap/strategy.go b/pkg/strategy/xgap/strategy.go new file mode 100644 index 0000000000..102ea2a42c --- /dev/null +++ b/pkg/strategy/xgap/strategy.go @@ -0,0 +1,387 @@ +package xgap + +import ( + "context" + "fmt" + "math" + "math/rand" + "sync" + "time" + + "github.com/sirupsen/logrus" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" +) + +const ID = "xgap" + +const stateKey = 
"state-v1" + +var log = logrus.WithField("strategy", ID) + +var StepPercentageGap = fixedpoint.NewFromFloat(0.05) +var NotionModifier = fixedpoint.NewFromFloat(1.01) +var Two = fixedpoint.NewFromInt(2) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +func (s *Strategy) ID() string { + return ID +} + +type State struct { + AccumulatedFeeStartedAt time.Time `json:"accumulatedFeeStartedAt,omitempty"` + AccumulatedFees map[string]fixedpoint.Value `json:"accumulatedFees,omitempty"` + AccumulatedVolume fixedpoint.Value `json:"accumulatedVolume,omitempty"` +} + +func (s *State) IsOver24Hours() bool { + return time.Since(s.AccumulatedFeeStartedAt) >= 24*time.Hour +} + +func (s *State) Reset() { + t := time.Now() + dateTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) + + log.Infof("resetting accumulated started time to: %s", dateTime) + + s.AccumulatedFeeStartedAt = dateTime + s.AccumulatedFees = make(map[string]fixedpoint.Value) + s.AccumulatedVolume = fixedpoint.Zero +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + + Symbol string `json:"symbol"` + SourceExchange string `json:"sourceExchange"` + TradingExchange string `json:"tradingExchange"` + MinSpread fixedpoint.Value `json:"minSpread"` + Quantity fixedpoint.Value `json:"quantity"` + + DailyFeeBudgets map[string]fixedpoint.Value `json:"dailyFeeBudgets,omitempty"` + DailyMaxVolume fixedpoint.Value `json:"dailyMaxVolume,omitempty"` + UpdateInterval types.Duration `json:"updateInterval"` + SimulateVolume bool `json:"simulateVolume"` + + sourceSession, tradingSession *bbgo.ExchangeSession + sourceMarket, tradingMarket types.Market + + state *State + + mu sync.Mutex + lastSourceKLine, lastTradingKLine types.KLine + sourceBook, tradingBook *types.StreamOrderBook + groupID uint32 + + stopC chan struct{} +} + +func (s *Strategy) isBudgetAllowed() bool { + if s.DailyFeeBudgets == nil { + return true + } + + if s.state.AccumulatedFees == nil { + return true + } + + for asset, budget := range s.DailyFeeBudgets { + if fee, ok := s.state.AccumulatedFees[asset]; ok { + if fee.Compare(budget) >= 0 { + log.Warnf("accumulative fee %s exceeded the fee budget %s, skipping...", fee.String(), budget.String()) + return false + } + } + } + + return true +} + +func (s *Strategy) handleTradeUpdate(trade types.Trade) { + log.Infof("received trade %+v", trade) + + if trade.Symbol != s.Symbol { + return + } + + if s.state.IsOver24Hours() { + s.state.Reset() + } + + // safe check + if s.state.AccumulatedFees == nil { + s.state.AccumulatedFees = make(map[string]fixedpoint.Value) + } + + s.state.AccumulatedFees[trade.FeeCurrency] = s.state.AccumulatedFees[trade.FeeCurrency].Add(trade.Fee) + s.state.AccumulatedVolume = s.state.AccumulatedVolume.Add(trade.Quantity) + log.Infof("accumulated fee: %s %s", s.state.AccumulatedFees[trade.FeeCurrency].String(), trade.FeeCurrency) +} + +func (s *Strategy) CrossSubscribe(sessions map[string]*bbgo.ExchangeSession) { + sourceSession, ok := sessions[s.SourceExchange] + if !ok { + panic(fmt.Errorf("source session %s is not defined", s.SourceExchange)) + } + + sourceSession.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"}) + sourceSession.Subscribe(types.BookChannel, s.Symbol, types.SubscribeOptions{}) + + tradingSession, ok := sessions[s.TradingExchange] + if !ok { + panic(fmt.Errorf("trading session %s is not defined", s.TradingExchange)) + } + + tradingSession.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"}) + 
tradingSession.Subscribe(types.BookChannel, s.Symbol, types.SubscribeOptions{}) +} + +func (s *Strategy) CrossRun(ctx context.Context, _ bbgo.OrderExecutionRouter, sessions map[string]*bbgo.ExchangeSession) error { + if s.UpdateInterval == 0 { + s.UpdateInterval = types.Duration(time.Second) + } + + sourceSession, ok := sessions[s.SourceExchange] + if !ok { + return fmt.Errorf("source session %s is not defined", s.SourceExchange) + } + s.sourceSession = sourceSession + + tradingSession, ok := sessions[s.TradingExchange] + if !ok { + return fmt.Errorf("trading session %s is not defined", s.TradingExchange) + } + s.tradingSession = tradingSession + + s.sourceMarket, ok = s.sourceSession.Market(s.Symbol) + if !ok { + return fmt.Errorf("source session market %s is not defined", s.Symbol) + } + + s.tradingMarket, ok = s.tradingSession.Market(s.Symbol) + if !ok { + return fmt.Errorf("trading session market %s is not defined", s.Symbol) + } + + s.stopC = make(chan struct{}) + + var state State + // load position + if err := s.Persistence.Load(&state, ID, stateKey); err != nil { + if err != service.ErrPersistenceNotExists { + return err + } + + s.state = &State{} + s.state.Reset() + } else { + // loaded successfully + s.state = &state + log.Infof("state is restored: %+v", s.state) + + if s.state.IsOver24Hours() { + log.Warn("state is over 24 hours, resetting to zero") + s.state.Reset() + } + } + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + + close(s.stopC) + + if err := s.Persistence.Save(&s.state, ID, stateKey); err != nil { + log.WithError(err).Errorf("can not save state: %+v", s.state) + } else { + log.Infof("state is saved => %+v", s.state) + } + }) + + // from here, set data binding + s.sourceSession.MarketDataStream.OnKLine(func(kline types.KLine) { + log.Infof("source exchange %s price: %s volume: %s", + s.Symbol, kline.Close.String(), kline.Volume.String()) + s.mu.Lock() + s.lastSourceKLine = kline + s.mu.Unlock() + }) + s.tradingSession.MarketDataStream.OnKLine(func(kline types.KLine) { + log.Infof("trading exchange %s price: %s volume: %s", + s.Symbol, kline.Close.String(), kline.Volume.String()) + s.mu.Lock() + s.lastTradingKLine = kline + s.mu.Unlock() + }) + + s.sourceBook = types.NewStreamBook(s.Symbol) + s.sourceBook.BindStream(s.sourceSession.MarketDataStream) + + s.tradingBook = types.NewStreamBook(s.Symbol) + s.tradingBook.BindStream(s.tradingSession.MarketDataStream) + + s.tradingSession.UserDataStream.OnTradeUpdate(s.handleTradeUpdate) + + instanceID := fmt.Sprintf("%s-%s", ID, s.Symbol) + s.groupID = util.FNV32(instanceID) + log.Infof("using group id %d from fnv32(%s)", s.groupID, instanceID) + + go func() { + ticker := time.NewTicker( + util.MillisecondsJitter(s.UpdateInterval.Duration(), 1000), + ) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return + + case <-s.stopC: + return + + case <-ticker.C: + if !s.isBudgetAllowed() { + continue + } + + // < 10 seconds jitter sleep + delay := util.MillisecondsJitter(s.UpdateInterval.Duration(), 10*1000) + if delay < s.UpdateInterval.Duration() { + time.Sleep(delay) + } + + bestBid, hasBid := s.tradingBook.BestBid() + bestAsk, hasAsk := s.tradingBook.BestAsk() + + // try to use the bid/ask price from the trading book + if hasBid && hasAsk { + var spread = bestAsk.Price.Sub(bestBid.Price) + var spreadPercentage = spread.Div(bestAsk.Price) + log.Infof("trading book spread=%s %s", + spread.String(), spreadPercentage.Percentage()) + + // use the source book price if the 
spread percentage greater than 10% + if spreadPercentage.Compare(StepPercentageGap) > 0 { + log.Warnf("spread too large (%s %s), using source book", + spread.String(), spreadPercentage.Percentage()) + bestBid, hasBid = s.sourceBook.BestBid() + bestAsk, hasAsk = s.sourceBook.BestAsk() + } + + if s.MinSpread.Sign() > 0 { + if spread.Compare(s.MinSpread) < 0 { + log.Warnf("spread < min spread, spread=%s minSpread=%s bid=%s ask=%s", + spread.String(), s.MinSpread.String(), + bestBid.Price.String(), bestAsk.Price.String()) + continue + } + } + + // if the spread is less than 100 ticks (100 pips), skip + if spread.Compare(s.tradingMarket.TickSize.MulExp(2)) < 0 { + log.Warnf("spread too small, we can't place orders: spread=%v bid=%v ask=%v", + spread, bestBid.Price, bestAsk.Price) + continue + } + + } else { + bestBid, hasBid = s.sourceBook.BestBid() + bestAsk, hasAsk = s.sourceBook.BestAsk() + } + + if !hasBid || !hasAsk { + log.Warn("no bids or asks on the source book or the trading book") + continue + } + + var spread = bestAsk.Price.Sub(bestBid.Price) + var spreadPercentage = spread.Div(bestAsk.Price) + log.Infof("spread=%v %s ask=%v bid=%v", + spread, spreadPercentage.Percentage(), + bestAsk.Price, bestBid.Price) + // var spreadPercentage = spread.Float64() / bestBid.Price.Float64() + + var midPrice = bestAsk.Price.Add(bestBid.Price).Div(Two) + var price = midPrice + + log.Infof("mid price %v", midPrice) + + var balances = s.tradingSession.GetAccount().Balances() + var quantity = s.tradingMarket.MinQuantity + + if s.Quantity.Sign() > 0 { + quantity = fixedpoint.Min(s.Quantity, s.tradingMarket.MinQuantity) + } else if s.SimulateVolume { + s.mu.Lock() + if s.lastTradingKLine.Volume.Sign() > 0 && s.lastSourceKLine.Volume.Sign() > 0 { + volumeDiff := s.lastSourceKLine.Volume.Sub(s.lastTradingKLine.Volume) + // change the current quantity only diff is positive + if volumeDiff.Sign() > 0 { + quantity = volumeDiff + } + + if baseBalance, ok := balances[s.tradingMarket.BaseCurrency]; ok { + quantity = fixedpoint.Min(quantity, baseBalance.Available) + } + + if quoteBalance, ok := balances[s.tradingMarket.QuoteCurrency]; ok { + maxQuantity := quoteBalance.Available.Div(price) + quantity = fixedpoint.Min(quantity, maxQuantity) + } + } + s.mu.Unlock() + } else { + // plus a 2% quantity jitter + jitter := 1.0 + math.Max(0.02, rand.Float64()) + quantity = quantity.Mul(fixedpoint.NewFromFloat(jitter)) + } + + var quoteAmount = price.Mul(quantity) + if quoteAmount.Compare(s.tradingMarket.MinNotional) <= 0 { + quantity = fixedpoint.Max( + s.tradingMarket.MinQuantity, + s.tradingMarket.MinNotional.Mul(NotionModifier).Div(price)) + } + + createdOrders, err := tradingSession.Exchange.SubmitOrders(ctx, types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeBuy, + Type: types.OrderTypeLimit, + Quantity: quantity, + Price: price, + Market: s.tradingMarket, + // TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + }, types.SubmitOrder{ + Symbol: s.Symbol, + Side: types.SideTypeSell, + Type: types.OrderTypeLimit, + Quantity: quantity, + Price: price, + Market: s.tradingMarket, + // TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + }) + if err != nil { + log.WithError(err).Error("order submit error") + } + + time.Sleep(time.Second) + + if err := tradingSession.Exchange.CancelOrders(ctx, createdOrders...); err != nil { + log.WithError(err).Error("cancel order error") + } + } + } + }() + + return nil +} diff --git a/pkg/strategy/xmaker/state.go b/pkg/strategy/xmaker/state.go new file mode 100644 
index 0000000000..1b61697305 --- /dev/null +++ b/pkg/strategy/xmaker/state.go @@ -0,0 +1,67 @@ +package xmaker + +import ( + "sync" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" +) + +type State struct { + CoveredPosition fixedpoint.Value `json:"coveredPosition,omitempty"` + + // Deprecated: + Position *types.Position `json:"position,omitempty"` + + // Deprecated: + ProfitStats ProfitStats `json:"profitStats,omitempty"` +} + +type ProfitStats struct { + *types.ProfitStats + + lock sync.Mutex + + MakerExchange types.ExchangeName `json:"makerExchange"` + + AccumulatedMakerVolume fixedpoint.Value `json:"accumulatedMakerVolume,omitempty"` + AccumulatedMakerBidVolume fixedpoint.Value `json:"accumulatedMakerBidVolume,omitempty"` + AccumulatedMakerAskVolume fixedpoint.Value `json:"accumulatedMakerAskVolume,omitempty"` + + TodayMakerVolume fixedpoint.Value `json:"todayMakerVolume,omitempty"` + TodayMakerBidVolume fixedpoint.Value `json:"todayMakerBidVolume,omitempty"` + TodayMakerAskVolume fixedpoint.Value `json:"todayMakerAskVolume,omitempty"` +} + +func (s *ProfitStats) AddTrade(trade types.Trade) { + s.ProfitStats.AddTrade(trade) + + if trade.Exchange == s.MakerExchange { + s.lock.Lock() + s.AccumulatedMakerVolume = s.AccumulatedMakerVolume.Add(trade.Quantity) + s.TodayMakerVolume = s.TodayMakerVolume.Add(trade.Quantity) + + switch trade.Side { + + case types.SideTypeSell: + s.AccumulatedMakerAskVolume = s.AccumulatedMakerAskVolume.Add(trade.Quantity) + s.TodayMakerAskVolume = s.TodayMakerAskVolume.Add(trade.Quantity) + + case types.SideTypeBuy: + s.AccumulatedMakerBidVolume = s.AccumulatedMakerBidVolume.Add(trade.Quantity) + s.TodayMakerBidVolume = s.TodayMakerBidVolume.Add(trade.Quantity) + + } + s.lock.Unlock() + } +} + +func (s *ProfitStats) ResetToday() { + s.ProfitStats.ResetToday() + + s.lock.Lock() + s.TodayMakerVolume = fixedpoint.Zero + s.TodayMakerBidVolume = fixedpoint.Zero + s.TodayMakerAskVolume = fixedpoint.Zero + s.lock.Unlock() +} diff --git a/pkg/strategy/xmaker/strategy.go b/pkg/strategy/xmaker/strategy.go new file mode 100644 index 0000000000..289b942083 --- /dev/null +++ b/pkg/strategy/xmaker/strategy.go @@ -0,0 +1,901 @@ +package xmaker + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "golang.org/x/time/rate" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/indicator" + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" +) + +var defaultMargin = fixedpoint.NewFromFloat(0.003) +var Two = fixedpoint.NewFromInt(2) + +const priceUpdateTimeout = 30 * time.Second + +const ID = "xmaker" + +const stateKey = "state-v1" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, &Strategy{}) +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + Environment *bbgo.Environment + + Symbol string `json:"symbol"` + + // SourceExchange session name + SourceExchange string `json:"sourceExchange"` + + // MakerExchange session name + MakerExchange string `json:"makerExchange"` + + UpdateInterval types.Duration `json:"updateInterval"` + HedgeInterval types.Duration `json:"hedgeInterval"` + OrderCancelWaitTime types.Duration `json:"orderCancelWaitTime"` + + Margin fixedpoint.Value `json:"margin"` + BidMargin fixedpoint.Value `json:"bidMargin"` + AskMargin fixedpoint.Value `json:"askMargin"` + UseDepthPrice bool `json:"useDepthPrice"` + DepthQuantity fixedpoint.Value `json:"depthQuantity"` + + 
EnableBollBandMargin bool `json:"enableBollBandMargin"` + BollBandInterval types.Interval `json:"bollBandInterval"` + BollBandMargin fixedpoint.Value `json:"bollBandMargin"` + BollBandMarginFactor fixedpoint.Value `json:"bollBandMarginFactor"` + + StopHedgeQuoteBalance fixedpoint.Value `json:"stopHedgeQuoteBalance"` + StopHedgeBaseBalance fixedpoint.Value `json:"stopHedgeBaseBalance"` + + // Quantity is used for fixed quantity of the first layer + Quantity fixedpoint.Value `json:"quantity"` + + // QuantityMultiplier is the factor that multiplies the quantity of the previous layer + QuantityMultiplier fixedpoint.Value `json:"quantityMultiplier"` + + // QuantityScale helps user to define the quantity by layer scale + QuantityScale *bbgo.LayerScale `json:"quantityScale,omitempty"` + + // MaxExposurePosition defines the unhedged quantity of stop + MaxExposurePosition fixedpoint.Value `json:"maxExposurePosition"` + + DisableHedge bool `json:"disableHedge"` + + NotifyTrade bool `json:"notifyTrade"` + + NumLayers int `json:"numLayers"` + + // Pips is the pips of the layer prices + Pips fixedpoint.Value `json:"pips"` + + // -------------------------------- + // private field + + makerSession, sourceSession *bbgo.ExchangeSession + + makerMarket, sourceMarket types.Market + + // boll is the BOLLINGER indicator we used for predicting the price. + boll *indicator.BOLL + + state *State + + // persistence fields + Position *types.Position `json:"position,omitempty" persistence:"position"` + ProfitStats *ProfitStats `json:"profitStats,omitempty" persistence:"profit_stats"` + CoveredPosition fixedpoint.Value `json:"coveredPosition,omitempty" persistence:"covered_position"` + + book *types.StreamOrderBook + activeMakerOrders *bbgo.ActiveOrderBook + + hedgeErrorLimiter *rate.Limiter + hedgeErrorRateReservation *rate.Reservation + + orderStore *bbgo.OrderStore + tradeCollector *bbgo.TradeCollector + + askPriceHeartBeat, bidPriceHeartBeat types.PriceHeartBeat + + lastPrice fixedpoint.Value + groupID uint32 + + stopC chan struct{} +} + +func (s *Strategy) ID() string { + return ID +} + +func (s *Strategy) InstanceID() string { + return fmt.Sprintf("%s:%s", ID, s.Symbol) +} + +func (s *Strategy) CrossSubscribe(sessions map[string]*bbgo.ExchangeSession) { + sourceSession, ok := sessions[s.SourceExchange] + if !ok { + panic(fmt.Errorf("source session %s is not defined", s.SourceExchange)) + } + + sourceSession.Subscribe(types.BookChannel, s.Symbol, types.SubscribeOptions{}) + sourceSession.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"}) + + makerSession, ok := sessions[s.MakerExchange] + if !ok { + panic(fmt.Errorf("maker session %s is not defined", s.MakerExchange)) + } + makerSession.Subscribe(types.KLineChannel, s.Symbol, types.SubscribeOptions{Interval: "1m"}) +} + +func aggregatePrice(pvs types.PriceVolumeSlice, requiredQuantity fixedpoint.Value) (price fixedpoint.Value) { + q := requiredQuantity + totalAmount := fixedpoint.Zero + + if len(pvs) == 0 { + price = fixedpoint.Zero + return price + } else if pvs[0].Volume.Compare(requiredQuantity) >= 0 { + return pvs[0].Price + } + + for i := 0; i < len(pvs); i++ { + pv := pvs[i] + if pv.Volume.Compare(q) >= 0 { + totalAmount = totalAmount.Add(q.Mul(pv.Price)) + break + } + + q = q.Sub(pv.Volume) + totalAmount = totalAmount.Add(pv.Volume.Mul(pv.Price)) + } + + price = totalAmount.Div(requiredQuantity) + return price +} + +func (s *Strategy) updateQuote(ctx context.Context, orderExecutionRouter bbgo.OrderExecutionRouter) { + if err 
:= s.activeMakerOrders.GracefulCancel(ctx, s.makerSession.Exchange); err != nil { + log.Warnf("there are some %s orders not canceled, skipping placing maker orders", s.Symbol) + s.activeMakerOrders.Print() + return + } + + if s.activeMakerOrders.NumOfOrders() > 0 { + return + } + + bestBid, bestAsk, hasPrice := s.book.BestBidAndAsk() + if !hasPrice { + return + } + + // use mid-price for the last price + s.lastPrice = bestBid.Price.Add(bestAsk.Price).Div(Two) + + bookLastUpdateTime := s.book.LastUpdateTime() + + if _, err := s.bidPriceHeartBeat.Update(bestBid, priceUpdateTimeout); err != nil { + log.WithError(err).Errorf("quote update error, %s price not updating, order book last update: %s ago", + s.Symbol, + time.Since(bookLastUpdateTime)) + return + } + + if _, err := s.askPriceHeartBeat.Update(bestAsk, priceUpdateTimeout); err != nil { + log.WithError(err).Errorf("quote update error, %s price not updating, order book last update: %s ago", + s.Symbol, + time.Since(bookLastUpdateTime)) + return + } + + sourceBook := s.book.CopyDepth(10) + if valid, err := sourceBook.IsValid(); !valid { + log.WithError(err).Errorf("%s invalid copied order book, skip quoting: %v", s.Symbol, err) + return + } + + var disableMakerBid = false + var disableMakerAsk = false + + // check maker's balance quota + // we load the balances from the account while we're generating the orders, + // the balance may have a chance to be deducted by other strategies or manual orders submitted by the user + makerBalances := s.makerSession.GetAccount().Balances() + makerQuota := &bbgo.QuotaTransaction{} + if b, ok := makerBalances[s.makerMarket.BaseCurrency]; ok { + if b.Available.Compare(s.makerMarket.MinQuantity) > 0 { + makerQuota.BaseAsset.Add(b.Available) + } else { + disableMakerAsk = true + } + } + + if b, ok := makerBalances[s.makerMarket.QuoteCurrency]; ok { + if b.Available.Compare(s.makerMarket.MinNotional) > 0 { + makerQuota.QuoteAsset.Add(b.Available) + } else { + disableMakerBid = true + } + } + + hedgeBalances := s.sourceSession.GetAccount().Balances() + hedgeQuota := &bbgo.QuotaTransaction{} + if b, ok := hedgeBalances[s.sourceMarket.BaseCurrency]; ok { + // to make bid orders, we need enough base asset in the foreign exchange, + // if the base asset balance is not enough for selling + if s.StopHedgeBaseBalance.Sign() > 0 { + minAvailable := s.StopHedgeBaseBalance.Add(s.sourceMarket.MinQuantity) + if b.Available.Compare(minAvailable) > 0 { + hedgeQuota.BaseAsset.Add(b.Available.Sub(minAvailable)) + } else { + log.Warnf("%s maker bid disabled: insufficient base balance %s", s.Symbol, b.String()) + disableMakerBid = true + } + } else if b.Available.Compare(s.sourceMarket.MinQuantity) > 0 { + hedgeQuota.BaseAsset.Add(b.Available) + } else { + log.Warnf("%s maker bid disabled: insufficient base balance %s", s.Symbol, b.String()) + disableMakerBid = true + } + } + + if b, ok := hedgeBalances[s.sourceMarket.QuoteCurrency]; ok { + // to make ask orders, we need enough quote asset in the foreign exchange, + // if the quote asset balance is not enough for buying + if s.StopHedgeQuoteBalance.Sign() > 0 { + minAvailable := s.StopHedgeQuoteBalance.Add(s.sourceMarket.MinNotional) + if b.Available.Compare(minAvailable) > 0 { + hedgeQuota.QuoteAsset.Add(b.Available.Sub(minAvailable)) + } else { + log.Warnf("%s maker ask disabled: insufficient quote balance %s", s.Symbol, b.String()) + disableMakerAsk = true + } + } else if b.Available.Compare(s.sourceMarket.MinNotional) > 0 { + hedgeQuota.QuoteAsset.Add(b.Available) + } else 
{ + log.Warnf("%s maker ask disabled: insufficient quote balance %s", s.Symbol, b.String()) + disableMakerAsk = true + } + } + + // if max exposure position is configured, we should not: + // 1. place bid orders when we already bought too much + // 2. place ask orders when we already sold too much + if s.MaxExposurePosition.Sign() > 0 { + pos := s.Position.GetBase() + + if pos.Compare(s.MaxExposurePosition.Neg()) > 0 { + // stop sell if we over-sell + disableMakerAsk = true + } else if pos.Compare(s.MaxExposurePosition) > 0 { + // stop buy if we over buy + disableMakerBid = true + } + } + + if disableMakerAsk && disableMakerBid { + log.Warnf("%s bid/ask maker is disabled due to insufficient balances", s.Symbol) + return + } + + bestBidPrice := bestBid.Price + bestAskPrice := bestAsk.Price + log.Infof("%s book ticker: best ask / best bid = %v / %v", s.Symbol, bestAskPrice, bestBidPrice) + + var submitOrders []types.SubmitOrder + var accumulativeBidQuantity, accumulativeAskQuantity fixedpoint.Value + var bidQuantity = s.Quantity + var askQuantity = s.Quantity + var bidMargin = s.BidMargin + var askMargin = s.AskMargin + var pips = s.Pips + + if s.EnableBollBandMargin { + lastDownBand := fixedpoint.NewFromFloat(s.boll.LastDownBand()) + lastUpBand := fixedpoint.NewFromFloat(s.boll.LastUpBand()) + + if lastUpBand.IsZero() || lastDownBand.IsZero() { + log.Warnf("bollinger band value is zero, skipping") + return + } + + log.Infof("bollinger band: up/down = %f/%f", lastUpBand.Float64(), lastDownBand.Float64()) + + // when bid price is lower than the down band, then it's in the downtrend + // when ask price is higher than the up band, then it's in the uptrend + if bestBidPrice.Compare(lastDownBand) < 0 { + // ratio here should be greater than 1.00 + ratio := lastDownBand.Div(bestBidPrice) + + // so that the original bid margin can be multiplied by 1.x + bollMargin := s.BollBandMargin.Mul(ratio).Mul(s.BollBandMarginFactor) + + log.Infof("%s bollband downtrend: adjusting ask margin %v + %v = %v", + s.Symbol, + askMargin, + bollMargin, + askMargin.Add(bollMargin)) + + askMargin = askMargin.Add(bollMargin) + pips = pips.Mul(ratio) + } + + if bestAskPrice.Compare(lastUpBand) > 0 { + // ratio here should be greater than 1.00 + ratio := bestAskPrice.Div(lastUpBand) + + // so that the original bid margin can be multiplied by 1.x + bollMargin := s.BollBandMargin.Mul(ratio).Mul(s.BollBandMarginFactor) + + log.Infof("%s bollband uptrend adjusting bid margin %v + %v = %v", + s.Symbol, + bidMargin, + bollMargin, + bidMargin.Add(bollMargin)) + + bidMargin = bidMargin.Add(bollMargin) + pips = pips.Mul(ratio) + } + } + + bidPrice := bestBidPrice + askPrice := bestAskPrice + for i := 0; i < s.NumLayers; i++ { + // for maker bid orders + if !disableMakerBid { + if s.QuantityScale != nil { + qf, err := s.QuantityScale.Scale(i + 1) + if err != nil { + log.WithError(err).Errorf("quantityScale error") + return + } + + log.Infof("%s scaling bid #%d quantity to %f", s.Symbol, i+1, qf) + + // override the default bid quantity + bidQuantity = fixedpoint.NewFromFloat(qf) + } + + accumulativeBidQuantity = accumulativeBidQuantity.Add(bidQuantity) + if s.UseDepthPrice { + if s.DepthQuantity.Sign() > 0 { + bidPrice = aggregatePrice(sourceBook.SideBook(types.SideTypeBuy), s.DepthQuantity) + } else { + bidPrice = aggregatePrice(sourceBook.SideBook(types.SideTypeBuy), accumulativeBidQuantity) + } + } + + bidPrice = bidPrice.Mul(fixedpoint.One.Sub(bidMargin)) + if i > 0 && pips.Sign() > 0 { + bidPrice = 
bidPrice.Sub(pips.Mul(fixedpoint.NewFromInt(int64(i)). + Mul(s.makerMarket.TickSize))) + } + + if makerQuota.QuoteAsset.Lock(bidQuantity.Mul(bidPrice)) && hedgeQuota.BaseAsset.Lock(bidQuantity) { + // if we bought, then we need to sell the base from the hedge session + submitOrders = append(submitOrders, types.SubmitOrder{ + Symbol: s.Symbol, + Type: types.OrderTypeLimit, + Side: types.SideTypeBuy, + Price: bidPrice, + Quantity: bidQuantity, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + }) + + makerQuota.Commit() + hedgeQuota.Commit() + } else { + makerQuota.Rollback() + hedgeQuota.Rollback() + } + + if s.QuantityMultiplier.Sign() > 0 { + bidQuantity = bidQuantity.Mul(s.QuantityMultiplier) + } + } + + // for maker ask orders + if !disableMakerAsk { + if s.QuantityScale != nil { + qf, err := s.QuantityScale.Scale(i + 1) + if err != nil { + log.WithError(err).Errorf("quantityScale error") + return + } + + log.Infof("%s scaling ask #%d quantity to %f", s.Symbol, i+1, qf) + + // override the default bid quantity + askQuantity = fixedpoint.NewFromFloat(qf) + } + accumulativeAskQuantity = accumulativeAskQuantity.Add(askQuantity) + + if s.UseDepthPrice { + if s.DepthQuantity.Sign() > 0 { + askPrice = aggregatePrice(sourceBook.SideBook(types.SideTypeSell), s.DepthQuantity) + } else { + askPrice = aggregatePrice(sourceBook.SideBook(types.SideTypeSell), accumulativeAskQuantity) + } + } + + askPrice = askPrice.Mul(fixedpoint.One.Add(askMargin)) + if i > 0 && pips.Sign() > 0 { + askPrice = askPrice.Add(pips.Mul(fixedpoint.NewFromInt(int64(i)).Mul(s.makerMarket.TickSize))) + } + + if makerQuota.BaseAsset.Lock(askQuantity) && hedgeQuota.QuoteAsset.Lock(askQuantity.Mul(askPrice)) { + // if we bought, then we need to sell the base from the hedge session + submitOrders = append(submitOrders, types.SubmitOrder{ + Symbol: s.Symbol, + Market: s.makerMarket, + Type: types.OrderTypeLimit, + Side: types.SideTypeSell, + Price: askPrice, + Quantity: askQuantity, + TimeInForce: types.TimeInForceGTC, + GroupID: s.groupID, + }) + makerQuota.Commit() + hedgeQuota.Commit() + } else { + makerQuota.Rollback() + hedgeQuota.Rollback() + } + + if s.QuantityMultiplier.Sign() > 0 { + askQuantity = askQuantity.Mul(s.QuantityMultiplier) + } + } + } + + if len(submitOrders) == 0 { + log.Warnf("no orders generated") + return + } + + makerOrders, err := orderExecutionRouter.SubmitOrdersTo(ctx, s.MakerExchange, submitOrders...) + if err != nil { + log.WithError(err).Errorf("order error: %s", err.Error()) + return + } + + s.activeMakerOrders.Add(makerOrders...) + s.orderStore.Add(makerOrders...) 
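In the layer loop above, each maker price starts from the best (or depth-aggregated) source price, has the margin applied, and for every layer after the first is pushed further away by pips * layer * tick size. A simplified single-layer view of the bid side is sketched below (helper name is illustrative, assuming this file's fixedpoint import):

// layerBidPrice applies the bid margin and, for layers after the first, shifts the
// price down by pips * layer * tickSize, as the loop above does for bid orders.
func layerBidPrice(sourceBid, bidMargin, pips, tickSize fixedpoint.Value, layer int) fixedpoint.Value {
	price := sourceBid.Mul(fixedpoint.One.Sub(bidMargin))
	if layer > 0 && pips.Sign() > 0 {
		price = price.Sub(pips.Mul(fixedpoint.NewFromInt(int64(layer)).Mul(tickSize)))
	}
	return price
}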
+} + +var lastPriceModifier = fixedpoint.NewFromFloat(1.001) +var minGap = fixedpoint.NewFromFloat(1.02) + +func (s *Strategy) Hedge(ctx context.Context, pos fixedpoint.Value) { + side := types.SideTypeBuy + if pos.IsZero() { + return + } + + quantity := pos.Abs() + + if pos.Sign() < 0 { + side = types.SideTypeSell + } + + lastPrice := s.lastPrice + sourceBook := s.book.CopyDepth(1) + switch side { + + case types.SideTypeBuy: + if bestAsk, ok := sourceBook.BestAsk(); ok { + lastPrice = bestAsk.Price + } + + case types.SideTypeSell: + if bestBid, ok := sourceBook.BestBid(); ok { + lastPrice = bestBid.Price + } + } + + notional := quantity.Mul(lastPrice) + if notional.Compare(s.sourceMarket.MinNotional) <= 0 { + log.Warnf("%s %v less than min notional, skipping hedge", s.Symbol, notional) + return + } + + // adjust quantity according to the balances + account := s.sourceSession.GetAccount() + switch side { + + case types.SideTypeBuy: + // check quote quantity + if quote, ok := account.Balance(s.sourceMarket.QuoteCurrency); ok { + if quote.Available.Compare(notional) < 0 { + // adjust price to higher 0.1%, so that we can ensure that the order can be executed + quantity = bbgo.AdjustQuantityByMaxAmount(quantity, lastPrice.Mul(lastPriceModifier), quote.Available) + quantity = s.sourceMarket.TruncateQuantity(quantity) + } + } + + case types.SideTypeSell: + // check quote quantity + if base, ok := account.Balance(s.sourceMarket.BaseCurrency); ok { + if base.Available.Compare(quantity) < 0 { + quantity = base.Available + } + } + } + + // truncate quantity for the supported precision + quantity = s.sourceMarket.TruncateQuantity(quantity) + + if notional.Compare(s.sourceMarket.MinNotional.Mul(minGap)) <= 0 { + log.Warnf("the adjusted amount %v is less than minimal notional %v, skipping hedge", notional, s.sourceMarket.MinNotional) + return + } + + if quantity.Compare(s.sourceMarket.MinQuantity.Mul(minGap)) <= 0 { + log.Warnf("the adjusted quantity %v is less than minimal quantity %v, skipping hedge", quantity, s.sourceMarket.MinQuantity) + return + } + + if s.hedgeErrorRateReservation != nil { + if !s.hedgeErrorRateReservation.OK() { + return + } + bbgo.Notify("Hit hedge error rate limit, waiting...") + time.Sleep(s.hedgeErrorRateReservation.Delay()) + s.hedgeErrorRateReservation = nil + } + + log.Infof("submitting %s hedge order %s %v", s.Symbol, side.String(), quantity) + bbgo.Notify("Submitting %s hedge order %s %v", s.Symbol, side.String(), quantity) + orderExecutor := &bbgo.ExchangeOrderExecutor{Session: s.sourceSession} + returnOrders, err := orderExecutor.SubmitOrders(ctx, types.SubmitOrder{ + Market: s.sourceMarket, + Symbol: s.Symbol, + Type: types.OrderTypeMarket, + Side: side, + Quantity: quantity, + }) + + if err != nil { + s.hedgeErrorRateReservation = s.hedgeErrorLimiter.Reserve() + log.WithError(err).Errorf("market order submit error: %s", err.Error()) + return + } + + // if it's selling, than we should add positive position + if side == types.SideTypeSell { + s.CoveredPosition = s.CoveredPosition.Add(quantity) + } else { + s.CoveredPosition = s.CoveredPosition.Add(quantity.Neg()) + } + + s.orderStore.Add(returnOrders...) 
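Hedge above submits a market order on the source exchange and then records how much of the maker position is now covered: a hedge sell adds to CoveredPosition, a hedge buy subtracts from it. The sign convention in isolation (helper name is illustrative, assuming this file's fixedpoint and types imports):

// applyHedgeFill updates the covered position for a filled hedge order: selling on
// the source exchange covers a long maker position, buying covers a short one.
func applyHedgeFill(covered, quantity fixedpoint.Value, side types.SideType) fixedpoint.Value {
	if side == types.SideTypeSell {
		return covered.Add(quantity)
	}
	return covered.Add(quantity.Neg())
}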
+} + +func (s *Strategy) Validate() error { + if s.Quantity.IsZero() || s.QuantityScale == nil { + return errors.New("quantity or quantityScale can not be empty") + } + + if !s.QuantityMultiplier.IsZero() && s.QuantityMultiplier.Sign() < 0 { + return errors.New("quantityMultiplier can not be a negative number") + } + + if len(s.Symbol) == 0 { + return errors.New("symbol is required") + } + + return nil +} + +func (s *Strategy) LoadState() error { + var state State + + // load position + if err := s.Persistence.Load(&state, ID, s.Symbol, stateKey); err == nil { + s.state = &state + } + + return nil +} + +func (s *Strategy) CrossRun(ctx context.Context, orderExecutionRouter bbgo.OrderExecutionRouter, sessions map[string]*bbgo.ExchangeSession) error { + if s.BollBandInterval == "" { + s.BollBandInterval = types.Interval1m + } + + if s.BollBandMarginFactor.IsZero() { + s.BollBandMarginFactor = fixedpoint.One + } + if s.BollBandMargin.IsZero() { + s.BollBandMargin = fixedpoint.NewFromFloat(0.001) + } + + // configure default values + if s.UpdateInterval == 0 { + s.UpdateInterval = types.Duration(time.Second) + } + + if s.HedgeInterval == 0 { + s.HedgeInterval = types.Duration(10 * time.Second) + } + + if s.NumLayers == 0 { + s.NumLayers = 1 + } + + if s.BidMargin.IsZero() { + if !s.Margin.IsZero() { + s.BidMargin = s.Margin + } else { + s.BidMargin = defaultMargin + } + } + + if s.AskMargin.IsZero() { + if !s.Margin.IsZero() { + s.AskMargin = s.Margin + } else { + s.AskMargin = defaultMargin + } + } + + s.hedgeErrorLimiter = rate.NewLimiter(rate.Every(1*time.Minute), 1) + + // configure sessions + sourceSession, ok := sessions[s.SourceExchange] + if !ok { + return fmt.Errorf("source exchange session %s is not defined", s.SourceExchange) + } + + s.sourceSession = sourceSession + + makerSession, ok := sessions[s.MakerExchange] + if !ok { + return fmt.Errorf("maker exchange session %s is not defined", s.MakerExchange) + } + + s.makerSession = makerSession + + s.sourceMarket, ok = s.sourceSession.Market(s.Symbol) + if !ok { + return fmt.Errorf("source session market %s is not defined", s.Symbol) + } + + s.makerMarket, ok = s.makerSession.Market(s.Symbol) + if !ok { + return fmt.Errorf("maker session market %s is not defined", s.Symbol) + } + + standardIndicatorSet, ok := s.sourceSession.StandardIndicatorSet(s.Symbol) + if !ok { + return fmt.Errorf("%s standard indicator set not found", s.Symbol) + } + + s.boll = standardIndicatorSet.BOLL(types.IntervalWindow{ + Interval: s.BollBandInterval, + Window: 21, + }, 1.0) + + if store, ok := s.sourceSession.MarketDataStore(s.Symbol); ok { + if klines, ok2 := store.KLinesOfInterval(s.BollBandInterval); ok2 { + s.boll.Update(*klines) + } + } + + // restore state + instanceID := s.InstanceID() + s.groupID = util.FNV32(instanceID) + log.Infof("using group id %d from fnv(%s)", s.groupID, instanceID) + + if err := s.LoadState(); err != nil { + return err + } + + if s.Position == nil { + if s.state != nil && s.state.Position != nil { + s.Position = s.state.Position + } else { + s.Position = types.NewPositionFromMarket(s.makerMarket) + } + + // force update for legacy code + s.Position.Market = s.makerMarket + } + + bbgo.Notify("xmaker: %s position is restored", s.Symbol, s.Position) + + if s.ProfitStats == nil { + if s.state != nil { + p2 := s.state.ProfitStats + s.ProfitStats = &p2 + } else { + s.ProfitStats = &ProfitStats{ + ProfitStats: types.NewProfitStats(s.makerMarket), + MakerExchange: s.makerSession.ExchangeName, + } + } + } + + if 
s.CoveredPosition.IsZero() { + if s.state != nil && !s.CoveredPosition.IsZero() { + s.CoveredPosition = s.state.CoveredPosition + } + } + + if s.makerSession.MakerFeeRate.Sign() > 0 || s.makerSession.TakerFeeRate.Sign() > 0 { + s.Position.SetExchangeFeeRate(types.ExchangeName(s.MakerExchange), types.ExchangeFee{ + MakerFeeRate: s.makerSession.MakerFeeRate, + TakerFeeRate: s.makerSession.TakerFeeRate, + }) + } + + if s.sourceSession.MakerFeeRate.Sign() > 0 || s.sourceSession.TakerFeeRate.Sign() > 0 { + s.Position.SetExchangeFeeRate(types.ExchangeName(s.SourceExchange), types.ExchangeFee{ + MakerFeeRate: s.sourceSession.MakerFeeRate, + TakerFeeRate: s.sourceSession.TakerFeeRate, + }) + } + + s.book = types.NewStreamBook(s.Symbol) + s.book.BindStream(s.sourceSession.MarketDataStream) + + s.activeMakerOrders = bbgo.NewActiveOrderBook(s.Symbol) + s.activeMakerOrders.BindStream(s.makerSession.UserDataStream) + + s.orderStore = bbgo.NewOrderStore(s.Symbol) + s.orderStore.BindStream(s.sourceSession.UserDataStream) + s.orderStore.BindStream(s.makerSession.UserDataStream) + + s.tradeCollector = bbgo.NewTradeCollector(s.Symbol, s.Position, s.orderStore) + + if s.NotifyTrade { + s.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + bbgo.Notify(trade) + }) + } + + s.tradeCollector.OnTrade(func(trade types.Trade, profit, netProfit fixedpoint.Value) { + c := trade.PositionChange() + if trade.Exchange == s.sourceSession.ExchangeName { + s.CoveredPosition = s.CoveredPosition.Add(c) + } + + s.ProfitStats.AddTrade(trade) + + if profit.Compare(fixedpoint.Zero) == 0 { + s.Environment.RecordPosition(s.Position, trade, nil) + } else { + log.Infof("%s generated profit: %v", s.Symbol, profit) + + p := s.Position.NewProfit(trade, profit, netProfit) + p.Strategy = ID + p.StrategyInstanceID = instanceID + bbgo.Notify(&p) + s.ProfitStats.AddProfit(p) + + s.Environment.RecordPosition(s.Position, trade, &p) + } + }) + + s.tradeCollector.OnPositionUpdate(func(position *types.Position) { + bbgo.Notify(position) + }) + s.tradeCollector.OnRecover(func(trade types.Trade) { + bbgo.Notify("Recover trade", trade) + }) + s.tradeCollector.BindStream(s.sourceSession.UserDataStream) + s.tradeCollector.BindStream(s.makerSession.UserDataStream) + + s.stopC = make(chan struct{}) + + go func() { + posTicker := time.NewTicker(util.MillisecondsJitter(s.HedgeInterval.Duration(), 200)) + defer posTicker.Stop() + + quoteTicker := time.NewTicker(util.MillisecondsJitter(s.UpdateInterval.Duration(), 200)) + defer quoteTicker.Stop() + + reportTicker := time.NewTicker(time.Hour) + defer reportTicker.Stop() + + tradeScanInterval := 20 * time.Minute + tradeScanTicker := time.NewTicker(tradeScanInterval) + defer tradeScanTicker.Stop() + + defer func() { + if err := s.activeMakerOrders.GracefulCancel(context.Background(), s.makerSession.Exchange); err != nil { + log.WithError(err).Errorf("can not cancel %s orders", s.Symbol) + } + }() + + for { + select { + + case <-s.stopC: + log.Warnf("%s maker goroutine stopped, due to the stop signal", s.Symbol) + return + + case <-ctx.Done(): + log.Warnf("%s maker goroutine stopped, due to the cancelled context", s.Symbol) + return + + case <-quoteTicker.C: + s.updateQuote(ctx, orderExecutionRouter) + + case <-reportTicker.C: + bbgo.Notify(&s.ProfitStats) + + case <-tradeScanTicker.C: + log.Infof("scanning trades from %s ago...", tradeScanInterval) + startTime := time.Now().Add(-tradeScanInterval) + if err := s.tradeCollector.Recover(ctx, 
s.sourceSession.Exchange.(types.ExchangeTradeHistoryService), s.Symbol, startTime); err != nil { + log.WithError(err).Errorf("query trades error") + } + + case <-posTicker.C: + // For positive position and positive covered position: + // uncover position = +5 - +3 (covered position) = 2 + // + // For positive position and negative covered position: + // uncover position = +5 - (-3) (covered position) = 8 + // + // meaning we bought 5 on MAX and sent buy order with 3 on binance + // + // For negative position: + // uncover position = -5 - -3 (covered position) = -2 + s.tradeCollector.Process() + + position := s.Position.GetBase() + + uncoverPosition := position.Sub(s.CoveredPosition) + absPos := uncoverPosition.Abs() + if !s.DisableHedge && absPos.Compare(s.sourceMarket.MinQuantity) > 0 { + log.Infof("%s base position %v coveredPosition: %v uncoverPosition: %v", + s.Symbol, + position, + s.CoveredPosition, + uncoverPosition, + ) + + s.Hedge(ctx, uncoverPosition.Neg()) + } + } + } + }() + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + + close(s.stopC) + + // wait for the quoter to stop + time.Sleep(s.UpdateInterval.Duration()) + + shutdownCtx, cancelShutdown := context.WithTimeout(context.TODO(), time.Minute) + defer cancelShutdown() + + if err := s.activeMakerOrders.GracefulCancel(shutdownCtx, s.makerSession.Exchange); err != nil { + log.WithError(err).Errorf("graceful cancel error") + } + + bbgo.Notify("%s: %s position", ID, s.Symbol, s.Position) + }) + + return nil +} diff --git a/pkg/strategy/xmaker/strategy_test.go b/pkg/strategy/xmaker/strategy_test.go new file mode 100644 index 0000000000..d3f2b7fb96 --- /dev/null +++ b/pkg/strategy/xmaker/strategy_test.go @@ -0,0 +1,36 @@ +package xmaker + +import ( + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/types" + "github.com/stretchr/testify/assert" +) + +func Test_aggregatePrice(t *testing.T) { + bids := types.PriceVolumeSlice{ + { + Price: fixedpoint.NewFromFloat(1000.0), + Volume: fixedpoint.NewFromFloat(1.0), + }, + { + Price: fixedpoint.NewFromFloat(1200.0), + Volume: fixedpoint.NewFromFloat(1.0), + }, + { + Price: fixedpoint.NewFromFloat(1400.0), + Volume: fixedpoint.NewFromFloat(1.0), + }, + } + + aggregatedPrice1 := aggregatePrice(bids, fixedpoint.NewFromFloat(0.5)) + assert.Equal(t, fixedpoint.NewFromFloat(1000.0), aggregatedPrice1) + + aggregatedPrice2 := aggregatePrice(bids, fixedpoint.NewFromInt(1)) + assert.Equal(t, fixedpoint.NewFromFloat(1000.0), aggregatedPrice2) + + aggregatedPrice3 := aggregatePrice(bids, fixedpoint.NewFromInt(2)) + assert.Equal(t, fixedpoint.NewFromFloat(1100.0), aggregatedPrice3) + +} diff --git a/pkg/strategy/xnav/csv.go b/pkg/strategy/xnav/csv.go new file mode 100644 index 0000000000..d8793c52d4 --- /dev/null +++ b/pkg/strategy/xnav/csv.go @@ -0,0 +1 @@ +package xnav diff --git a/pkg/strategy/xnav/strategy.go b/pkg/strategy/xnav/strategy.go new file mode 100644 index 0000000000..f11582efb3 --- /dev/null +++ b/pkg/strategy/xnav/strategy.go @@ -0,0 +1,218 @@ +package xnav + +import ( + "context" + "sync" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/slack-go/slack" + + "github.com/c9s/bbgo/pkg/bbgo" + "github.com/c9s/bbgo/pkg/service" + "github.com/c9s/bbgo/pkg/types" + "github.com/c9s/bbgo/pkg/util" +) + +const ID = "xnav" + +const stateKey = "state-v1" + +var log = logrus.WithField("strategy", ID) + +func init() { + bbgo.RegisterStrategy(ID, 
&Strategy{}) +} + +type State struct { + Since int64 `json:"since"` +} + +func (s *State) IsOver24Hours() bool { + return util.Over24Hours(time.Unix(s.Since, 0)) +} + +func (s *State) PlainText() string { + return util.Render(`{{ .Asset }} transfer stats: +daily number of transfers: {{ .DailyNumberOfTransfers }} +daily amount of transfers {{ .DailyAmountOfTransfers.Float64 }}`, s) +} + +func (s *State) SlackAttachment() slack.Attachment { + return slack.Attachment{ + // Pretext: "", + // Text: text, + Fields: []slack.AttachmentField{}, + Footer: util.Render("Since {{ . }}", time.Unix(s.Since, 0).Format(time.RFC822)), + } +} + +func (s *State) Reset() { + var beginningOfTheDay = util.BeginningOfTheDay(time.Now().Local()) + *s = State{ + Since: beginningOfTheDay.Unix(), + } +} + +type Strategy struct { + *bbgo.Graceful + *bbgo.Persistence + *bbgo.Environment + + Interval types.Interval `json:"interval"` + ReportOnStart bool `json:"reportOnStart"` + IgnoreDusts bool `json:"ignoreDusts"` + state *State +} + +func (s *Strategy) ID() string { + return ID +} + +var Ten = fixedpoint.NewFromInt(10) + +func (s *Strategy) CrossSubscribe(sessions map[string]*bbgo.ExchangeSession) {} + +func (s *Strategy) recordNetAssetValue(ctx context.Context, sessions map[string]*bbgo.ExchangeSession) { + totalBalances := types.BalanceMap{} + allPrices := map[string]fixedpoint.Value{} + sessionBalances := map[string]types.BalanceMap{} + priceTime := time.Now() + + // iterate the sessions and record them + for sessionName, session := range sessions { + // update the account balances and the margin information + if _, err := session.UpdateAccount(ctx); err != nil { + log.WithError(err).Errorf("can not update account") + return + } + + account := session.GetAccount() + balances := account.Balances() + if err := session.UpdatePrices(ctx, balances.Currencies(), "USDT"); err != nil { + log.WithError(err).Error("price update failed") + return + } + + sessionBalances[sessionName] = balances + totalBalances = totalBalances.Add(balances) + + prices := session.LastPrices() + assets := balances.Assets(prices, priceTime) + + // merge prices + for m, p := range prices { + allPrices[m] = p + } + + s.Environment.RecordAsset(priceTime, session, assets) + } + + displayAssets := types.AssetMap{} + totalAssets := totalBalances.Assets(allPrices, priceTime) + s.Environment.RecordAsset(priceTime, &bbgo.ExchangeSession{Name: "ALL"}, totalAssets) + + for currency, asset := range totalAssets { + // calculated if it's dust only when InUSD (usd value) is defined. 
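+ // In other words, an asset is counted as dust only when its USD value is
+ // known and falls below the Ten (10 USD) threshold. For example, with
+ // IgnoreDusts enabled an asset whose InUSD value is 3 is skipped, while an
+ // asset with a zero InUSD value is still displayed because its USD value
+ // is simply unknown (illustrative numbers only).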
+ if s.IgnoreDusts && !asset.InUSD.IsZero() && asset.InUSD.Compare(Ten) < 0 { + continue + } + + displayAssets[currency] = asset + } + + bbgo.Notify(displayAssets) + + if s.state != nil { + if s.state.IsOver24Hours() { + s.state.Reset() + } + + s.SaveState() + } +} + +func (s *Strategy) SaveState() { + if err := s.Persistence.Save(s.state, ID, stateKey); err != nil { + log.WithError(err).Errorf("%s can not save state: %+v", ID, s.state) + } else { + log.Infof("%s state is saved: %+v", ID, s.state) + // s.Notifiability.Notify("%s %s state is saved", ID, s.Asset, s.state) + } +} + +func (s *Strategy) newDefaultState() *State { + return &State{} +} + +func (s *Strategy) LoadState() error { + var state State + if err := s.Persistence.Load(&state, ID, stateKey); err != nil { + if err != service.ErrPersistenceNotExists { + return err + } + + s.state = s.newDefaultState() + s.state.Reset() + } else { + // we loaded it successfully + s.state = &state + + // update Asset name for legacy caches + // s.state.Asset = s.Asset + + log.Infof("%s state is restored: %+v", ID, s.state) + bbgo.Notify("%s state is restored", ID, s.state) + } + + return nil +} + +func (s *Strategy) CrossRun(ctx context.Context, _ bbgo.OrderExecutionRouter, sessions map[string]*bbgo.ExchangeSession) error { + if s.Interval == "" { + return errors.New("interval can not be empty") + } + + if err := s.LoadState(); err != nil { + return err + } + + s.Graceful.OnShutdown(func(ctx context.Context, wg *sync.WaitGroup) { + defer wg.Done() + + s.SaveState() + }) + + if s.ReportOnStart { + s.recordNetAssetValue(ctx, sessions) + } + + if s.Environment.BacktestService != nil { + log.Warnf("xnav does not support backtesting") + } + + // TODO: if interval is supported, we can use kline as the ticker + if _, ok := types.SupportedIntervals[s.Interval]; ok { + + } + + go func() { + ticker := time.NewTicker(util.MillisecondsJitter(s.Interval.Duration(), 1000)) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return + + case <-ticker.C: + s.recordNetAssetValue(ctx, sessions) + } + } + }() + + return nil +} diff --git a/pkg/strategy/xpuremaker/strategy.go b/pkg/strategy/xpuremaker/strategy.go index 53f75737f2..a5d441bdc1 100644 --- a/pkg/strategy/xpuremaker/strategy.go +++ b/pkg/strategy/xpuremaker/strategy.go @@ -12,8 +12,12 @@ import ( "github.com/c9s/bbgo/pkg/types" ) +const ID = "xpuremaker" + +var Ten = fixedpoint.NewFromInt(10) + func init() { - bbgo.RegisterStrategy("xpuremaker", &Strategy{}) + bbgo.RegisterStrategy(ID, &Strategy{}) } type Strategy struct { @@ -29,6 +33,10 @@ type Strategy struct { activeOrders map[string]types.Order } +func (s *Strategy) ID() string { + return ID +} + func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { session.Subscribe(types.BookChannel, s.Symbol, types.SubscribeOptions{}) } @@ -36,7 +44,7 @@ func (s *Strategy) Subscribe(session *bbgo.ExchangeSession) { func (s *Strategy) Run(ctx context.Context, orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession) error { s.book = types.NewStreamBook(s.Symbol) - s.book.BindStream(session.Stream) + s.book.BindStream(session.UserDataStream) s.activeOrders = make(map[string]types.Order) @@ -104,8 +112,8 @@ func (s *Strategy) update(orderExecutor bbgo.OrderExecutor, session *bbgo.Exchan func (s *Strategy) updateOrders(orderExecutor bbgo.OrderExecutor, session *bbgo.ExchangeSession, side types.SideType) { var book = s.book.Copy() - var pvs = book.PriceVolumesBySide(side) - if pvs == nil || len(pvs) == 0 { + var pvs = book.SideBook(side) 
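+ // SideBook returns the given side's price levels as a slice; len() on a
+ // nil slice is zero, so the separate nil check used with the old
+ // PriceVolumesBySide call is no longer needed.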
+ if len(pvs) == 0 { log.Warnf("empty side: %s", side) return } @@ -139,26 +147,31 @@ func (s *Strategy) updateOrders(orderExecutor bbgo.OrderExecutor, session *bbgo. } func (s *Strategy) generateOrders(symbol string, side types.SideType, price, priceTick, baseQuantity fixedpoint.Value, numOrders int) (orders []types.SubmitOrder) { - var expBase = fixedpoint.NewFromFloat(0.0) + var expBase = fixedpoint.Zero switch side { case types.SideTypeBuy: - if priceTick > 0 { - priceTick = -priceTick + if priceTick.Sign() > 0 { + priceTick = priceTick.Neg() } case types.SideTypeSell: - if priceTick < 0 { - priceTick = -priceTick + if priceTick.Sign() < 0 { + priceTick = priceTick.Neg() } } - for i := 0; i < numOrders; i++ { - volume := math.Exp(expBase.Float64()) * baseQuantity.Float64() + decdigits := priceTick.Abs().NumIntDigits() + step := priceTick.Abs().MulExp(-decdigits + 1) + for i := 0; i < numOrders; i++ { + quantityExp := fixedpoint.NewFromFloat(math.Exp(expBase.Float64())) + volume := baseQuantity.Mul(quantityExp) + amount := volume.Mul(price) // skip order less than 10usd - if volume*price.Float64() < 10.0 { - log.Warnf("amount too small (< 10usd). price=%f volume=%f amount=%f", price.Float64(), volume, volume*price.Float64()) + if amount.Compare(Ten) < 0 { + log.Warnf("amount too small (< 10usd). price=%s volume=%s amount=%s", + price.String(), volume.String(), amount.String()) continue } @@ -166,19 +179,18 @@ func (s *Strategy) generateOrders(symbol string, side types.SideType, price, pri Symbol: symbol, Side: side, Type: types.OrderTypeLimit, - Price: price.Float64(), + Price: price, Quantity: volume, }) - log.Infof("%s order: %.2f @ %f", side, volume, price.Float64()) + log.Infof("%s order: %s @ %s", side, volume.String(), price.String()) if len(orders) >= numOrders { break } - price = price + priceTick - declog := math.Log10(math.Abs(priceTick.Float64())) - expBase += fixedpoint.NewFromFloat(math.Pow10(-int(declog)) * math.Abs(priceTick.Float64())) + price = price.Add(priceTick) + expBase = expBase.Add(step) } return orders diff --git a/pkg/testutil/auth.go b/pkg/testutil/auth.go new file mode 100644 index 0000000000..164207e295 --- /dev/null +++ b/pkg/testutil/auth.go @@ -0,0 +1,25 @@ +package testutil + +import ( + "os" + "regexp" + "testing" +) + +func maskSecret(s string) string { + re := regexp.MustCompile(`\b(\w{4})\w+\b`) + s = re.ReplaceAllString(s, "$1******") + return s +} + +func IntegrationTestConfigured(t *testing.T, prefix string) (key, secret string, ok bool) { + var hasKey, hasSecret bool + key, hasKey = os.LookupEnv(prefix + "_API_KEY") + secret, hasSecret = os.LookupEnv(prefix + "_API_SECRET") + ok = hasKey && hasSecret && os.Getenv("TEST_"+prefix) == "1" + if ok { + t.Logf(prefix+" api integration test enabled, key = %s, secret = %s", maskSecret(key), maskSecret(secret)) + } + + return key, secret, ok +} diff --git a/pkg/types/account.go b/pkg/types/account.go index e3f45a718a..866368c13c 100644 --- a/pkg/types/account.go +++ b/pkg/types/account.go @@ -2,63 +2,105 @@ package types import ( "fmt" - "strings" "sync" "github.com/sirupsen/logrus" + "github.com/spf13/viper" "github.com/c9s/bbgo/pkg/fixedpoint" ) -type Balance struct { - Currency string `json:"currency"` - Available fixedpoint.Value `json:"available"` - Locked fixedpoint.Value `json:"locked"` +var debugBalance = false + +func init() { + debugBalance = viper.GetBool("debug-balance") } -func (b Balance) String() string { - if b.Locked > 0 { - return fmt.Sprintf("%s: %f (locked %f)", b.Currency, 
b.Available.Float64(), b.Locked.Float64()) - } +type PositionMap map[string]Position +type IsolatedMarginAssetMap map[string]IsolatedMarginAsset +type MarginAssetMap map[string]MarginUserAsset +type FuturesAssetMap map[string]FuturesUserAsset +type FuturesPositionMap map[string]FuturesPosition - return fmt.Sprintf("%s: %f", b.Currency, b.Available.Float64()) -} +type AccountType string -type BalanceMap map[string]Balance +const ( + AccountTypeFutures = AccountType("futures") + AccountTypeMargin = AccountType("margin") + AccountTypeIsolatedMargin = AccountType("isolated_margin") + AccountTypeSpot = AccountType("spot") +) -func (m BalanceMap) String() string { - var ss []string - for _, b := range m { - ss = append(ss, b.String()) - } +type Account struct { + sync.Mutex `json:"-"` - return "BalanceMap[" + strings.Join(ss, ", ") + "]" -} + AccountType AccountType `json:"accountType,omitempty"` + FuturesInfo *FuturesAccountInfo + MarginInfo *MarginAccountInfo + IsolatedMarginInfo *IsolatedMarginAccountInfo -func (m BalanceMap) Print() { - for _, balance := range m { - if balance.Available == 0 && balance.Locked == 0 { - continue - } + // Margin related common field + // From binance: + // Margin Level = Total Asset Value / (Total Borrowed + Total Accrued Interest) + // If your margin level drops to 1.3, you will receive a Margin Call, which is a reminder that you should either increase your collateral (by depositing more funds) or reduce your loan (by repaying what you’ve borrowed). + // If your margin level drops to 1.1, your assets will be automatically liquidated, meaning that Binance will sell your funds at market price to repay the loan. + MarginLevel fixedpoint.Value `json:"marginLevel,omitempty"` + MarginTolerance fixedpoint.Value `json:"marginTolerance,omitempty"` - if balance.Locked > 0 { - logrus.Infof(" %s: %f (locked %f)", balance.Currency, balance.Available.Float64(), balance.Locked.Float64()) - } else { - logrus.Infof(" %s: %f", balance.Currency, balance.Available.Float64()) - } - } -} + BorrowEnabled bool `json:"borrowEnabled,omitempty"` + TransferEnabled bool `json:"transferEnabled,omitempty"` -type Account struct { - sync.Mutex + // isolated margin related fields + // LiquidationPrice is only used when account is in the isolated margin mode + MarginRatio fixedpoint.Value `json:"marginRatio,omitempty"` + LiquidationPrice fixedpoint.Value `json:"liquidationPrice,omitempty"` + LiquidationRate fixedpoint.Value `json:"liquidationRate,omitempty"` + + MakerFeeRate fixedpoint.Value `json:"makerFeeRate,omitempty"` + TakerFeeRate fixedpoint.Value `json:"takerFeeRate,omitempty"` - MakerCommission int `json:"makerCommission"` - TakerCommission int `json:"takerCommission"` - AccountType string `json:"accountType"` + TotalAccountValue fixedpoint.Value `json:"totalAccountValue,omitempty"` + + CanDeposit bool `json:"canDeposit"` + CanTrade bool `json:"canTrade"` + CanWithdraw bool `json:"canWithdraw"` balances BalanceMap } +type FuturesAccountInfo struct { + // Futures fields + Assets FuturesAssetMap `json:"assets"` + Positions FuturesPositionMap `json:"positions"` + TotalInitialMargin fixedpoint.Value `json:"totalInitialMargin"` + TotalMaintMargin fixedpoint.Value `json:"totalMaintMargin"` + TotalMarginBalance fixedpoint.Value `json:"totalMarginBalance"` + TotalOpenOrderInitialMargin fixedpoint.Value `json:"totalOpenOrderInitialMargin"` + TotalPositionInitialMargin fixedpoint.Value `json:"totalPositionInitialMargin"` + TotalUnrealizedProfit fixedpoint.Value `json:"totalUnrealizedProfit"` + 
TotalWalletBalance fixedpoint.Value `json:"totalWalletBalance"` + UpdateTime int64 `json:"updateTime"` +} + +type MarginAccountInfo struct { + // Margin fields + BorrowEnabled bool `json:"borrowEnabled"` + MarginLevel fixedpoint.Value `json:"marginLevel"` + TotalAssetOfBTC fixedpoint.Value `json:"totalAssetOfBtc"` + TotalLiabilityOfBTC fixedpoint.Value `json:"totalLiabilityOfBtc"` + TotalNetAssetOfBTC fixedpoint.Value `json:"totalNetAssetOfBtc"` + TradeEnabled bool `json:"tradeEnabled"` + TransferEnabled bool `json:"transferEnabled"` + Assets MarginAssetMap `json:"userAssets"` +} + +type IsolatedMarginAccountInfo struct { + TotalAssetOfBTC fixedpoint.Value `json:"totalAssetOfBtc"` + TotalLiabilityOfBTC fixedpoint.Value `json:"totalLiabilityOfBtc"` + TotalNetAssetOfBTC fixedpoint.Value `json:"totalNetAssetOfBtc"` + Assets IsolatedMarginAssetMap `json:"userAssets"` +} + func NewAccount() *Account { return &Account{ balances: make(BalanceMap), @@ -66,15 +108,10 @@ func NewAccount() *Account { } // Balances lock the balances and returned the copied balances -func (a *Account) Balances() BalanceMap { - d := make(BalanceMap) - +func (a *Account) Balances() (d BalanceMap) { a.Lock() - for c, b := range a.balances { - d[c] = b - } + d = a.balances.Copy() a.Unlock() - return d } @@ -85,23 +122,22 @@ func (a *Account) Balance(currency string) (balance Balance, ok bool) { return balance, ok } -func (a *Account) AddBalance(currency string, fund fixedpoint.Value) error { +func (a *Account) AddBalance(currency string, fund fixedpoint.Value) { a.Lock() defer a.Unlock() balance, ok := a.balances[currency] if ok { - balance.Available += fund + balance.Available = balance.Available.Add(fund) a.balances[currency] = balance - return nil + return } a.balances[currency] = Balance{ Currency: currency, Available: fund, - Locked: 0, + Locked: fixedpoint.Zero, } - return nil } func (a *Account) UseLockedBalance(currency string, fund fixedpoint.Value) error { @@ -109,15 +145,22 @@ func (a *Account) UseLockedBalance(currency string, fund fixedpoint.Value) error defer a.Unlock() balance, ok := a.balances[currency] - if ok && balance.Locked >= fund { - balance.Locked -= fund + if !ok { + return fmt.Errorf("account balance %s does not exist", currency) + } + + // simple case, using fund less than locked + if balance.Locked.Compare(fund) >= 0 { + balance.Locked = balance.Locked.Sub(fund) a.balances[currency] = balance return nil } - return fmt.Errorf("trying to use more than locked: locked %f < want to use %f", balance.Locked.Float64(), fund.Float64()) + return fmt.Errorf("trying to use more than locked: locked %v < want to use %v", balance.Locked, fund) } +var QuantityDelta = fixedpoint.MustNewFromString("0.00000000001") + func (a *Account) UnlockBalance(currency string, unlocked fixedpoint.Value) error { a.Lock() defer a.Unlock() @@ -127,12 +170,22 @@ func (a *Account) UnlockBalance(currency string, unlocked fixedpoint.Value) erro return fmt.Errorf("trying to unlocked inexisted balance: %s", currency) } - if unlocked > balance.Locked { - return fmt.Errorf("trying to unlocked more than locked %s: locked %f < want to unlock %f", currency, balance.Locked.Float64(), unlocked.Float64()) + // Instead of showing error in UnlockBalance, + // since this function is only called when cancel orders, + // there might be inequivalence in the last order quantity + if unlocked.Compare(balance.Locked) > 0 { + // check if diff is within delta + if unlocked.Sub(balance.Locked).Compare(QuantityDelta) <= 0 { + balance.Available = 
balance.Available.Add(balance.Locked) + balance.Locked = fixedpoint.Zero + a.balances[currency] = balance + return nil + } + return fmt.Errorf("trying to unlocked more than locked %s: locked %v < want to unlock %v", currency, balance.Locked, unlocked) } - balance.Locked -= unlocked - balance.Available += unlocked + balance.Locked = balance.Locked.Sub(unlocked) + balance.Available = balance.Available.Add(unlocked) a.balances[currency] = balance return nil } @@ -142,14 +195,14 @@ func (a *Account) LockBalance(currency string, locked fixedpoint.Value) error { defer a.Unlock() balance, ok := a.balances[currency] - if ok && balance.Available >= locked { - balance.Locked += locked - balance.Available -= locked + if ok && balance.Available.Compare(locked) >= 0 { + balance.Locked = balance.Locked.Add(locked) + balance.Available = balance.Available.Sub(locked) a.balances[currency] = balance return nil } - return fmt.Errorf("insufficient available balance %s for lock: want to lock %f, available %f", currency, locked.Float64(), balance.Available.Float64()) + return fmt.Errorf("insufficient available balance %s for lock: want to lock %v, available %v", currency, locked, balance.Available) } func (a *Account) UpdateBalances(balances BalanceMap) { @@ -165,18 +218,20 @@ func (a *Account) UpdateBalances(balances BalanceMap) { } } -func (a *Account) BindStream(stream Stream) { - stream.OnBalanceUpdate(a.UpdateBalances) - stream.OnBalanceSnapshot(a.UpdateBalances) -} - func (a *Account) Print() { a.Lock() defer a.Unlock() - for _, balance := range a.balances { - if balance.Available != 0 { - logrus.Infof("account balance %s %f", balance.Currency, balance.Available.Float64()) - } + if a.AccountType != "" { + logrus.Infof("account type: %s", a.AccountType) } + + if a.MakerFeeRate.Sign() > 0 { + logrus.Infof("maker fee rate: %v", a.MakerFeeRate) + } + if a.TakerFeeRate.Sign() > 0 { + logrus.Infof("taker fee rate: %v", a.TakerFeeRate) + } + + a.balances.Print() } diff --git a/pkg/types/account_test.go b/pkg/types/account_test.go index b46cc18c1e..3f3da99366 100644 --- a/pkg/types/account_test.go +++ b/pkg/types/account_test.go @@ -10,53 +10,53 @@ import ( func TestAccountLockAndUnlock(t *testing.T) { a := NewAccount() - err := a.AddBalance("USDT", 1000) - assert.NoError(t, err) + a.AddBalance("USDT", fixedpoint.NewFromInt(1000)) + var err error balance, ok := a.Balance("USDT") assert.True(t, ok) - assert.Equal(t, balance.Available, fixedpoint.Value(1000)) - assert.Equal(t, balance.Locked, fixedpoint.Value(0)) + assert.Equal(t, balance.Available, fixedpoint.NewFromInt(1000)) + assert.Equal(t, balance.Locked, fixedpoint.Zero) - err = a.LockBalance("USDT", fixedpoint.Value(100)) + err = a.LockBalance("USDT", fixedpoint.NewFromInt(100)) assert.NoError(t, err) balance, ok = a.Balance("USDT") assert.True(t, ok) - assert.Equal(t, balance.Available, fixedpoint.Value(900)) - assert.Equal(t, balance.Locked, fixedpoint.Value(100)) + assert.Equal(t, balance.Available, fixedpoint.NewFromInt(900)) + assert.Equal(t, balance.Locked, fixedpoint.NewFromInt(100)) - err = a.UnlockBalance("USDT", 100) + err = a.UnlockBalance("USDT", fixedpoint.NewFromInt(100)) assert.NoError(t, err) balance, ok = a.Balance("USDT") assert.True(t, ok) - assert.Equal(t, balance.Available, fixedpoint.Value(1000)) - assert.Equal(t, balance.Locked, fixedpoint.Value(0)) + assert.Equal(t, balance.Available, fixedpoint.NewFromInt(1000)) + assert.Equal(t, balance.Locked, fixedpoint.Zero) } func TestAccountLockAndUse(t *testing.T) { a := NewAccount() - err := 
a.AddBalance("USDT", 1000) - assert.NoError(t, err) + a.AddBalance("USDT", fixedpoint.NewFromInt(1000)) + var err error balance, ok := a.Balance("USDT") assert.True(t, ok) - assert.Equal(t, balance.Available, fixedpoint.Value(1000)) - assert.Equal(t, balance.Locked, fixedpoint.Value(0)) + assert.Equal(t, balance.Available, fixedpoint.NewFromInt(1000)) + assert.Equal(t, balance.Locked, fixedpoint.Zero) - err = a.LockBalance("USDT", 100) + err = a.LockBalance("USDT", fixedpoint.NewFromInt(100)) assert.NoError(t, err) balance, ok = a.Balance("USDT") assert.True(t, ok) - assert.Equal(t, balance.Available, fixedpoint.Value(900)) - assert.Equal(t, balance.Locked, fixedpoint.Value(100)) + assert.Equal(t, balance.Available, fixedpoint.NewFromInt(900)) + assert.Equal(t, balance.Locked, fixedpoint.NewFromInt(100)) - err = a.UseLockedBalance("USDT", 100) + err = a.UseLockedBalance("USDT", fixedpoint.NewFromInt(100)) assert.NoError(t, err) balance, ok = a.Balance("USDT") assert.True(t, ok) - assert.Equal(t, balance.Available, fixedpoint.Value(900)) - assert.Equal(t, balance.Locked, fixedpoint.Value(0)) + assert.Equal(t, balance.Available, fixedpoint.NewFromInt(900)) + assert.Equal(t, balance.Locked, fixedpoint.Zero) } diff --git a/pkg/types/active_book.go b/pkg/types/active_book.go deleted file mode 100644 index 5bb13ecf9d..0000000000 --- a/pkg/types/active_book.go +++ /dev/null @@ -1,2 +0,0 @@ -package types - diff --git a/pkg/types/asset.go b/pkg/types/asset.go new file mode 100644 index 0000000000..8b17de4226 --- /dev/null +++ b/pkg/types/asset.go @@ -0,0 +1,147 @@ +package types + +import ( + "fmt" + "sort" + "time" + + "github.com/slack-go/slack" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type Asset struct { + Currency string `json:"currency" db:"currency"` + + Total fixedpoint.Value `json:"total" db:"total"` + + NetAsset fixedpoint.Value `json:"netAsset" db:"net_asset"` + + Interest fixedpoint.Value `json:"interest" db:"interest"` + + // InUSD is net asset in USD + InUSD fixedpoint.Value `json:"inUSD" db:"net_asset_in_usd"` + + // InBTC is net asset in BTC + InBTC fixedpoint.Value `json:"inBTC" db:"net_asset_in_btc"` + + Time time.Time `json:"time" db:"time"` + Locked fixedpoint.Value `json:"lock" db:"lock" ` + Available fixedpoint.Value `json:"available" db:"available"` + Borrowed fixedpoint.Value `json:"borrowed" db:"borrowed"` + PriceInUSD fixedpoint.Value `json:"priceInUSD" db:"price_in_usd"` +} + +type AssetMap map[string]Asset + +func (m AssetMap) InUSD() (total fixedpoint.Value) { + for _, a := range m { + if a.InUSD.IsZero() { + continue + } + + total = total.Add(a.InUSD) + } + return total +} + +func (m AssetMap) PlainText() (o string) { + var assets = m.Slice() + + // sort assets + sort.Slice(assets, func(i, j int) bool { + return assets[i].InUSD.Compare(assets[j].InUSD) > 0 + }) + + sumUsd := fixedpoint.Zero + sumBTC := fixedpoint.Zero + for _, a := range assets { + usd := a.InUSD + btc := a.InBTC + if !a.InUSD.IsZero() { + o += fmt.Sprintf(" %s: %s (≈ %s) (≈ %s)", + a.Currency, + a.Total.String(), + USD.FormatMoney(usd), + BTC.FormatMoney(btc), + ) + "\n" + sumUsd = sumUsd.Add(usd) + sumBTC = sumBTC.Add(btc) + } else { + o += fmt.Sprintf(" %s: %s", + a.Currency, + a.Total.String(), + ) + "\n" + } + } + o += fmt.Sprintf(" Summary: (≈ %s) (≈ %s)", + USD.FormatMoney(sumUsd), + BTC.FormatMoney(sumBTC), + ) + "\n" + return o +} + +func (m AssetMap) Slice() (assets []Asset) { + for _, a := range m { + assets = append(assets, a) + } + return assets +} + +func (m AssetMap) 
SlackAttachment() slack.Attachment { + var fields []slack.AttachmentField + var netAssetInBTC, netAssetInUSD fixedpoint.Value + + var assets = m.Slice() + + // sort assets + sort.Slice(assets, func(i, j int) bool { + return assets[i].InUSD.Compare(assets[j].InUSD) > 0 + }) + + for _, a := range assets { + netAssetInUSD = netAssetInUSD.Add(a.InUSD) + netAssetInBTC = netAssetInBTC.Add(a.InBTC) + } + + for _, a := range assets { + if !a.InUSD.IsZero() { + text := fmt.Sprintf("%s (≈ %s) (≈ %s) (%s)", + a.NetAsset.String(), + USD.FormatMoney(a.InUSD), + BTC.FormatMoney(a.InBTC), + a.InUSD.Div(netAssetInUSD).FormatPercentage(2), + ) + + if !a.Borrowed.IsZero() { + text += fmt.Sprintf(" Borrowed: %s", a.Borrowed.String()) + } + + fields = append(fields, slack.AttachmentField{ + Title: a.Currency, + Value: text, + Short: false, + }) + } else { + text := a.NetAsset.String() + + if !a.Borrowed.IsZero() { + text += fmt.Sprintf(" Borrowed: %s", a.Borrowed.String()) + } + + fields = append(fields, slack.AttachmentField{ + Title: a.Currency, + Value: text, + Short: false, + }) + } + } + + return slack.Attachment{ + Title: fmt.Sprintf("Net Asset Value %s (≈ %s)", + USD.FormatMoney(netAssetInUSD), + BTC.FormatMoney(netAssetInBTC), + ), + Fields: fields, + } +} diff --git a/pkg/types/backtest_stream.go b/pkg/types/backtest_stream.go new file mode 100644 index 0000000000..ee46d31fcc --- /dev/null +++ b/pkg/types/backtest_stream.go @@ -0,0 +1,19 @@ +package types + +import ( + "context" +) + +type BacktestStream struct { + StandardStreamEmitter +} + +func (s *BacktestStream) Connect(ctx context.Context) error { + s.EmitConnect() + s.EmitStart() + return nil +} + +func (s *BacktestStream) Close() error { + return nil +} diff --git a/pkg/types/balance.go b/pkg/types/balance.go new file mode 100644 index 0000000000..8a69895e74 --- /dev/null +++ b/pkg/types/balance.go @@ -0,0 +1,258 @@ +package types + +import ( + "fmt" + "strconv" + "strings" + "time" + + log "github.com/sirupsen/logrus" + "github.com/slack-go/slack" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type Balance struct { + Currency string `json:"currency"` + Available fixedpoint.Value `json:"available"` + Locked fixedpoint.Value `json:"locked,omitempty"` + + // margin related fields + Borrowed fixedpoint.Value `json:"borrowed,omitempty"` + Interest fixedpoint.Value `json:"interest,omitempty"` + + // NetAsset = (Available + Locked) - Borrowed - Interest + NetAsset fixedpoint.Value `json:"net,omitempty"` +} + +func (b Balance) Add(b2 Balance) Balance { + var newB = b + newB.Available = b.Available.Add(b2.Available) + newB.Locked = b.Locked.Add(b2.Locked) + newB.Borrowed = b.Borrowed.Add(b2.Borrowed) + newB.NetAsset = b.NetAsset.Add(b2.NetAsset) + newB.Interest = b.Interest.Add(b2.Interest) + return newB +} + +func (b Balance) Total() fixedpoint.Value { + return b.Available.Add(b.Locked) +} + +func (b Balance) Net() fixedpoint.Value { + total := b.Total() + netAsset := b.NetAsset + if netAsset.IsZero() { + netAsset = total.Sub(b.Borrowed).Sub(b.Interest) + } + + return netAsset +} + +func (b Balance) ValueString() (o string) { + o = b.Available.String() + + if b.Locked.Sign() > 0 { + o += fmt.Sprintf(" (locked %v)", b.Locked) + } + + if b.Borrowed.Sign() > 0 { + o += fmt.Sprintf(" (borrowed: %v)", b.Borrowed) + } + + return o +} + +func (b Balance) String() (o string) { + o = fmt.Sprintf("%s: %s", b.Currency, b.Available.String()) + + if b.Locked.Sign() > 0 { + o += fmt.Sprintf(" (locked %v)", b.Locked) + } + + if b.Borrowed.Sign() > 0 { + o += 
fmt.Sprintf(" (borrowed: %v)", b.Borrowed) + } + + return o +} + +type BalanceSnapshot struct { + Balances BalanceMap `json:"balances"` + Session string `json:"session"` + Time time.Time `json:"time"` +} + +func (m BalanceSnapshot) CsvHeader() []string { + return []string{"time", "session", "currency", "available", "locked", "borrowed"} +} + +func (m BalanceSnapshot) CsvRecords() [][]string { + var records [][]string + + for cur, b := range m.Balances { + records = append(records, []string{ + strconv.FormatInt(m.Time.Unix(), 10), + m.Session, + cur, + b.Available.String(), + b.Locked.String(), + b.Borrowed.String(), + }) + } + + return records +} + +type BalanceMap map[string]Balance + +func (m BalanceMap) Debts() BalanceMap { + bm := make(BalanceMap) + for c, b := range m { + if b.Borrowed.Sign() > 0 || b.Interest.Sign() > 0 { + bm[c] = b + } + } + return bm +} + +func (m BalanceMap) Currencies() (currencies []string) { + for _, b := range m { + currencies = append(currencies, b.Currency) + } + return currencies +} + +func (m BalanceMap) Add(bm BalanceMap) BalanceMap { + var total = m.Copy() + for _, b := range bm { + tb, ok := total[b.Currency] + if ok { + tb = tb.Add(b) + } else { + tb = b + } + total[b.Currency] = tb + } + return total +} + +func (m BalanceMap) String() string { + var ss []string + for _, b := range m { + ss = append(ss, b.String()) + } + + return "BalanceMap[" + strings.Join(ss, ", ") + "]" +} + +func (m BalanceMap) Copy() (d BalanceMap) { + d = make(BalanceMap) + for c, b := range m { + d[c] = b + } + return d +} + +// Assets converts balances into assets with the given prices +func (m BalanceMap) Assets(prices map[string]fixedpoint.Value, priceTime time.Time) AssetMap { + assets := make(AssetMap) + + _, btcInUSD, hasBtcPrice := findUSDMarketPrice("BTC", prices) + + for currency, b := range m { + total := b.Total() + netAsset := b.Net() + if total.IsZero() && netAsset.IsZero() { + continue + } + + asset := Asset{ + Currency: currency, + Total: total, + Time: priceTime, + Locked: b.Locked, + Available: b.Available, + Borrowed: b.Borrowed, + Interest: b.Interest, + NetAsset: netAsset, + } + + if strings.HasPrefix(currency, "USD") { // for usd + asset.InUSD = netAsset + asset.PriceInUSD = fixedpoint.One + if hasBtcPrice && !asset.InUSD.IsZero() { + asset.InBTC = asset.InUSD.Div(btcInUSD) + } + } else { // for crypto + if market, usdPrice, ok := findUSDMarketPrice(currency, prices); ok { + // this includes USDT, USD, USDC and so on + if strings.HasPrefix(market, "USD") { // for prices like USDT/TWD + if !asset.NetAsset.IsZero() { + asset.InUSD = asset.NetAsset.Div(usdPrice) + } + asset.PriceInUSD = fixedpoint.One.Div(usdPrice) + } else { // for prices like BTC/USDT + if !asset.NetAsset.IsZero() { + asset.InUSD = asset.NetAsset.Mul(usdPrice) + } + asset.PriceInUSD = usdPrice + } + + if hasBtcPrice && !asset.InUSD.IsZero() { + asset.InBTC = asset.InUSD.Div(btcInUSD) + } + } + } + + assets[currency] = asset + } + + return assets +} + +func (m BalanceMap) Print() { + for _, balance := range m { + if balance.Available.IsZero() && balance.Locked.IsZero() { + continue + } + + o := fmt.Sprintf(" %s: %v", balance.Currency, balance.Available) + if balance.Locked.Sign() > 0 { + o += fmt.Sprintf(" (locked %v)", balance.Locked) + } + + if balance.Borrowed.Sign() > 0 { + o += fmt.Sprintf(" (borrowed %v)", balance.Borrowed) + } + + log.Infoln(o) + } +} + +func (m BalanceMap) SlackAttachment() slack.Attachment { + var fields []slack.AttachmentField + + for _, b := range m { + fields = 
append(fields, slack.AttachmentField{ + Title: b.Currency, + Value: b.ValueString(), + Short: true, + }) + } + + return slack.Attachment{ + Color: "#CCA33F", + Fields: fields, + } +} + +func findUSDMarketPrice(currency string, prices map[string]fixedpoint.Value) (string, fixedpoint.Value, bool) { + usdMarkets := []string{currency + "USDT", currency + "USDC", currency + "USD", "USDT" + currency} + for _, market := range usdMarkets { + if usdPrice, ok := prices[market]; ok { + return market, usdPrice, ok + } + } + return "", fixedpoint.Zero, false +} diff --git a/pkg/types/balance_test.go b/pkg/types/balance_test.go new file mode 100644 index 0000000000..6b03481927 --- /dev/null +++ b/pkg/types/balance_test.go @@ -0,0 +1,39 @@ +package types + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +func TestBalanceMap_Add(t *testing.T) { + var bm = BalanceMap{} + var bm2 = bm.Add(BalanceMap{ + "BTC": Balance{ + Currency: "BTC", + Available: fixedpoint.MustNewFromString("10.0"), + Locked: fixedpoint.MustNewFromString("0"), + NetAsset: fixedpoint.MustNewFromString("10.0"), + }, + }) + assert.Len(t, bm2, 1) + + var bm3 = bm2.Add(BalanceMap{ + "BTC": Balance{ + Currency: "BTC", + Available: fixedpoint.MustNewFromString("1.0"), + Locked: fixedpoint.MustNewFromString("0"), + NetAsset: fixedpoint.MustNewFromString("1.0"), + }, + "LTC": Balance{ + Currency: "LTC", + Available: fixedpoint.MustNewFromString("20.0"), + Locked: fixedpoint.MustNewFromString("0"), + NetAsset: fixedpoint.MustNewFromString("20.0"), + }, + }) + assert.Len(t, bm3, 2) + assert.Equal(t, fixedpoint.MustNewFromString("11.0"), bm3["BTC"].Available) +} diff --git a/pkg/types/batch.go b/pkg/types/batch.go deleted file mode 100644 index bb5c003227..0000000000 --- a/pkg/types/batch.go +++ /dev/null @@ -1,172 +0,0 @@ -package types - -import ( - "context" - "time" - - "github.com/sirupsen/logrus" - "golang.org/x/time/rate" -) - -type ExchangeBatchProcessor struct { - Exchange -} - -func (e ExchangeBatchProcessor) BatchQueryClosedOrders(ctx context.Context, symbol string, startTime, endTime time.Time, lastOrderID uint64) (c chan Order, errC chan error) { - c = make(chan Order, 500) - errC = make(chan error, 1) - - go func() { - limiter := rate.NewLimiter(rate.Every(5*time.Second), 2) // from binance (original 1200, use 1000 for safety) - - defer close(c) - defer close(errC) - - orderIDs := make(map[uint64]struct{}, 500) - if lastOrderID > 0 { - orderIDs[lastOrderID] = struct{}{} - } - - for startTime.Before(endTime) { - if err := limiter.Wait(ctx); err != nil { - logrus.WithError(err).Error("rate limit error") - } - - logrus.Infof("batch querying %s closed orders %s <=> %s", symbol, startTime, endTime) - - orders, err := e.QueryClosedOrders(ctx, symbol, startTime, endTime, lastOrderID) - if err != nil { - errC <- err - return - } - - if len(orders) == 0 || (len(orders) == 1 && orders[0].OrderID == lastOrderID) { - return - } - - for _, o := range orders { - if _, ok := orderIDs[o.OrderID]; ok { - logrus.Infof("skipping duplicated order id: %d", o.OrderID) - continue - } - - c <- o - startTime = o.CreationTime - lastOrderID = o.OrderID - orderIDs[o.OrderID] = struct{}{} - } - } - - }() - - return c, errC -} - -func (e ExchangeBatchProcessor) BatchQueryKLines(ctx context.Context, symbol string, interval Interval, startTime, endTime time.Time) (c chan KLine, errC chan error) { - c = make(chan KLine, 1000) - errC = make(chan error, 1) - - go func() { - limiter := 
rate.NewLimiter(rate.Every(5*time.Second), 2) // from binance (original 1200, use 1000 for safety) - - defer close(c) - defer close(errC) - - for startTime.Before(endTime) { - if err := limiter.Wait(ctx); err != nil { - logrus.WithError(err).Error("rate limit error") - } - - kLines, err := e.QueryKLines(ctx, symbol, interval, KLineQueryOptions{ - StartTime: &startTime, - Limit: 1000, - }) - - if err != nil { - errC <- err - return - } - - if len(kLines) == 0 { - return - } - - for _, kline := range kLines { - // ignore any kline before the given start time - if kline.StartTime.Before(startTime) { - continue - } - - if kline.EndTime.After(endTime) { - return - } - - c <- kline - startTime = kline.EndTime - } - } - }() - - return c, errC -} - -func (e ExchangeBatchProcessor) BatchQueryTrades(ctx context.Context, symbol string, options *TradeQueryOptions) (c chan Trade, errC chan error) { - c = make(chan Trade, 500) - errC = make(chan error, 1) - - // last 7 days - var startTime = time.Now().Add(-7 * 24 * time.Hour) - if options.StartTime != nil { - startTime = *options.StartTime - } - - var lastTradeID = options.LastTradeID - - go func() { - limiter := rate.NewLimiter(rate.Every(5*time.Second), 2) // from binance (original 1200, use 1000 for safety) - - defer close(c) - defer close(errC) - - for { - if err := limiter.Wait(ctx); err != nil { - logrus.WithError(err).Error("rate limit error") - } - - logrus.Infof("querying %s trades from %s, limit=%d", symbol, startTime, options.Limit) - - trades, err := e.QueryTrades(ctx, symbol, &TradeQueryOptions{ - StartTime: &startTime, - Limit: options.Limit, - LastTradeID: lastTradeID, - }) - if err != nil { - errC <- err - return - } - - if len(trades) == 0 { - break - } - - if len(trades) == 1 && trades[0].ID == lastTradeID { - break - } - - logrus.Infof("returned %d trades", len(trades)) - - startTime = trades[len(trades)-1].Time - for _, t := range trades { - // ignore the first trade if last TradeID is given - if t.ID == lastTradeID { - continue - } - - c <- t - lastTradeID = t.ID - } - } - }() - - return c, errC -} diff --git a/pkg/types/bookticker.go b/pkg/types/bookticker.go new file mode 100644 index 0000000000..5bb6b4e331 --- /dev/null +++ b/pkg/types/bookticker.go @@ -0,0 +1,22 @@ +package types + +import ( + "fmt" + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +// BookTicker time exists in ftx, not in binance +// last exists in ftx, not in binance +type BookTicker struct { + //Time time.Time + Symbol string + Buy fixedpoint.Value // `buy` from Max, `bidPrice` from binance + BuySize fixedpoint.Value + Sell fixedpoint.Value // `sell` from Max, `askPrice` from binance + SellSize fixedpoint.Value + //Last fixedpoint.Value +} + +func (b BookTicker) String() string { + return fmt.Sprintf("BookTicker { Symbol: %s,Buy: %f , BuySize: %f, Sell: %f, SellSize :%f } ", b.Symbol, b.Buy.Float64(), b.BuySize.Float64(), b.Sell.Float64(), b.SellSize.Float64()) +} diff --git a/pkg/types/channel.go b/pkg/types/channel.go new file mode 100644 index 0000000000..23a90fc1e6 --- /dev/null +++ b/pkg/types/channel.go @@ -0,0 +1,8 @@ +package types + +type Channel string + +var BookChannel = Channel("book") +var KLineChannel = Channel("kline") +var BookTickerChannel = Channel("bookticker") +var MarketTradeChannel = Channel("trade") diff --git a/pkg/types/color.go b/pkg/types/color.go index fa714b5cc0..ac8324aa2d 100644 --- a/pkg/types/color.go +++ b/pkg/types/color.go @@ -1,4 +1,5 @@ package types -const Green = "#228B22" -const Red = "#800000" +const GreenColor = 
"#228B22" +const RedColor = "#800000" +const GrayColor = "#f0f0f0" diff --git a/pkg/types/csv.go b/pkg/types/csv.go new file mode 100644 index 0000000000..46a0263bfc --- /dev/null +++ b/pkg/types/csv.go @@ -0,0 +1,7 @@ +package types + +// CsvFormatter is an interface used for dumping object into csv file +type CsvFormatter interface { + CsvHeader() []string + CsvRecords() [][]string +} diff --git a/pkg/types/currencies.go b/pkg/types/currencies.go index 6c6bfa3057..3be262926a 100644 --- a/pkg/types/currencies.go +++ b/pkg/types/currencies.go @@ -1,7 +1,34 @@ package types +import "math/big" + import "github.com/leekchan/accounting" -var USD = accounting.Accounting{Symbol: "$ ", Precision: 2} -var BTC = accounting.Accounting{Symbol: "BTC ", Precision: 2} -var BNB = accounting.Accounting{Symbol: "BNB ", Precision: 4} +import "github.com/c9s/bbgo/pkg/fixedpoint" + +type Acc = accounting.Accounting + +type wrapper struct { + Acc +} + +func (w *wrapper) FormatMoney(v fixedpoint.Value) string { + f := new(big.Float) + f.SetString(v.String()) + return w.Acc.FormatMoneyBigFloat(f) +} + +var USD = wrapper{accounting.Accounting{Symbol: "$ ", Precision: 2}} +var BTC = wrapper{accounting.Accounting{Symbol: "BTC ", Precision: 8}} +var BNB = wrapper{accounting.Accounting{Symbol: "BNB ", Precision: 4}} + +var FiatCurrencies = []string{"USDC", "USDT", "USD", "TWD", "EUR", "GBP", "BUSD"} + +func IsFiatCurrency(currency string) bool { + for _, c := range FiatCurrencies { + if c == currency { + return true + } + } + return false +} diff --git a/pkg/types/deposit.go b/pkg/types/deposit.go index e75117f993..2414da8fba 100644 --- a/pkg/types/deposit.go +++ b/pkg/types/deposit.go @@ -1,11 +1,16 @@ package types -import "time" +import ( + "fmt" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) type DepositStatus string const ( - DepositOther = DepositStatus("") + // EMPTY string means not supported DepositPending = DepositStatus("pending") @@ -20,15 +25,36 @@ const ( ) type Deposit struct { - Time time.Time `json:"time"` - Amount float64 `json:"amount"` - Asset string `json:"asset"` - Address string `json:"address"` - AddressTag string `json:"addressTag"` - TransactionID string `json:"txId"` - Status DepositStatus `json:"status"` + GID int64 `json:"gid" db:"gid"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + Time Time `json:"time" db:"time"` + Amount fixedpoint.Value `json:"amount" db:"amount"` + Asset string `json:"asset" db:"asset"` + Address string `json:"address" db:"address"` + AddressTag string `json:"addressTag"` + TransactionID string `json:"transactionID" db:"txn_id"` + Status DepositStatus `json:"status"` } func (d Deposit) EffectiveTime() time.Time { - return d.Time + return d.Time.Time() +} + +func (d Deposit) String() (o string) { + o = fmt.Sprintf("%s deposit %s %v <- ", d.Exchange, d.Asset, d.Amount) + + if len(d.AddressTag) > 0 { + o += fmt.Sprintf("%s (tag: %s) at %s", d.Address, d.AddressTag, d.Time.Time()) + } else { + o += fmt.Sprintf("%s at %s", d.Address, d.Time.Time()) + } + + if len(d.TransactionID) > 0 { + o += fmt.Sprintf("txID: %s", cutstr(d.TransactionID, 12, 4, 4)) + } + if len(d.Status) > 0 { + o += "status: " + string(d.Status) + } + + return o } diff --git a/pkg/types/error.go b/pkg/types/error.go new file mode 100644 index 0000000000..48ce67fac0 --- /dev/null +++ b/pkg/types/error.go @@ -0,0 +1,23 @@ +package types + +import "fmt" + +type OrderError struct { + error error + order Order +} + +func (e *OrderError) Error() string { + return fmt.Sprintf("%s exchange: 
%s orderID:%d", e.error.Error(), e.order.Exchange, e.order.OrderID) +} + +func (e *OrderError) Order() Order { + return e.order +} + +func NewOrderError(e error, o Order) error { + return &OrderError{ + error: e, + order: o, + } +} diff --git a/pkg/types/exchange.go b/pkg/types/exchange.go index 40dd56b165..5bff2689ca 100644 --- a/pkg/types/exchange.go +++ b/pkg/types/exchange.go @@ -2,34 +2,76 @@ package types import ( "context" + "database/sql/driver" + "encoding/json" + "fmt" "strings" "time" - "github.com/pkg/errors" + "github.com/c9s/bbgo/pkg/fixedpoint" ) const DateFormat = "2006-01-02" type ExchangeName string +func (n *ExchangeName) Value() (driver.Value, error) { + return n.String(), nil +} + +func (n *ExchangeName) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + + switch s { + case "max", "binance", "ftx", "okex": + *n = ExchangeName(s) + return nil + + } + + return fmt.Errorf("unknown or unsupported exchange name: %s, valid names are: max, binance, ftx", s) +} + func (n ExchangeName) String() string { return string(n) } const ( - ExchangeMax = ExchangeName("max") - ExchangeBinance = ExchangeName("binance") + ExchangeMax ExchangeName = "max" + ExchangeBinance ExchangeName = "binance" + ExchangeFTX ExchangeName = "ftx" + ExchangeOKEx ExchangeName = "okex" + ExchangeKucoin ExchangeName = "kucoin" + ExchangeBacktest ExchangeName = "backtest" ) +var SupportedExchanges = []ExchangeName{ + ExchangeMax, + ExchangeBinance, + ExchangeFTX, + ExchangeOKEx, + ExchangeKucoin, + // note: we are not using "backtest" +} + func ValidExchangeName(a string) (ExchangeName, error) { switch strings.ToLower(a) { case "max": return ExchangeMax, nil case "binance", "bn": return ExchangeBinance, nil + case "ftx": + return ExchangeFTX, nil + case "okex": + return ExchangeOKEx, nil + case "kucoin": + return ExchangeKucoin, nil } - return "", errors.New("invalid exchange name") + return "", fmt.Errorf("invalid exchange name: %s", a) } type Exchange interface { @@ -37,35 +79,70 @@ type Exchange interface { PlatformFeeCurrency() string - NewStream() Stream + ExchangeMarketDataService - QueryMarkets(ctx context.Context) (MarketMap, error) + ExchangeTradeService +} + +// ExchangeOrderQueryService provides an interface for querying the order status via order ID or client order ID +type ExchangeOrderQueryService interface { + QueryOrder(ctx context.Context, q OrderQuery) (*Order, error) +} +type ExchangeTradeService interface { QueryAccount(ctx context.Context) (*Account, error) QueryAccountBalances(ctx context.Context) (BalanceMap, error) - QueryKLines(ctx context.Context, symbol string, interval Interval, options KLineQueryOptions) ([]KLine, error) + SubmitOrders(ctx context.Context, orders ...SubmitOrder) (createdOrders OrderSlice, err error) + + QueryOpenOrders(ctx context.Context, symbol string) (orders []Order, err error) + + CancelOrders(ctx context.Context, orders ...Order) error +} +type ExchangeDefaultFeeRates interface { + DefaultFeeRates() ExchangeFee +} + +type ExchangeTradeHistoryService interface { QueryTrades(ctx context.Context, symbol string, options *TradeQueryOptions) ([]Trade, error) + QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) (orders []Order, err error) +} - QueryDepositHistory(ctx context.Context, asset string, since, until time.Time) (allDeposits []Deposit, err error) +type ExchangeMarketDataService interface { + NewStream() Stream - QueryWithdrawHistory(ctx 
context.Context, asset string, since, until time.Time) (allWithdraws []Withdraw, err error) + QueryMarkets(ctx context.Context) (MarketMap, error) - SubmitOrders(ctx context.Context, orders ...SubmitOrder) (createdOrders OrderSlice, err error) + QueryTicker(ctx context.Context, symbol string) (*Ticker, error) - QueryOpenOrders(ctx context.Context, symbol string) (orders []Order, err error) + QueryTickers(ctx context.Context, symbol ...string) (map[string]Ticker, error) - QueryClosedOrders(ctx context.Context, symbol string, since, until time.Time, lastOrderID uint64) (orders []Order, err error) + QueryKLines(ctx context.Context, symbol string, interval Interval, options KLineQueryOptions) ([]KLine, error) +} - CancelOrders(ctx context.Context, orders ...Order) error +type CustomIntervalProvider interface { + SupportedInterval() map[Interval]int + IsSupportedInterval(interval Interval) bool +} + +type ExchangeTransferService interface { + QueryDepositHistory(ctx context.Context, asset string, since, until time.Time) (allDeposits []Deposit, err error) + QueryWithdrawHistory(ctx context.Context, asset string, since, until time.Time) (allWithdraws []Withdraw, err error) +} + +type ExchangeWithdrawalService interface { + Withdraw(ctx context.Context, asset string, amount fixedpoint.Value, address string, options *WithdrawalOptions) error +} + +type ExchangeRewardService interface { + QueryRewards(ctx context.Context, startTime time.Time) ([]Reward, error) } type TradeQueryOptions struct { StartTime *time.Time EndTime *time.Time Limit int64 - LastTradeID int64 + LastTradeID uint64 } - diff --git a/pkg/types/exchange_icon.go b/pkg/types/exchange_icon.go new file mode 100644 index 0000000000..a85624a134 --- /dev/null +++ b/pkg/types/exchange_icon.go @@ -0,0 +1,20 @@ +package types + +func ExchangeFooterIcon(exName ExchangeName) string { + footerIcon := "" + + switch exName { + case ExchangeBinance: + footerIcon = "https://bin.bnbstatic.com/static/images/common/favicon.ico" + case ExchangeMax: + footerIcon = "https://max.maicoin.com/favicon-16x16.png" + case ExchangeFTX: + footerIcon = "https://ftx.com/favicon.ico?v=2" + case ExchangeOKEx: + footerIcon = "https://static.okex.com/cdn/assets/imgs/MjAxODg/D91A7323087D31A588E0D2A379DD7747.png" + case ExchangeKucoin: + footerIcon = "https://assets.staticimg.com/cms/media/7AV75b9jzr9S8H3eNuOuoqj8PwdUjaDQGKGczGqTS.png" + } + + return footerIcon +} diff --git a/pkg/types/float_map.go b/pkg/types/float_map.go new file mode 100644 index 0000000000..7491260735 --- /dev/null +++ b/pkg/types/float_map.go @@ -0,0 +1,42 @@ +package types + +type FloatMap map[string]float64 + +func (m FloatMap) Sum() float64 { + sum := 0.0 + for _, v := range m { + sum += v + } + return sum +} + +func (m FloatMap) MulScalar(x float64) FloatMap { + o := FloatMap{} + for k, v := range m { + o[k] = v * x + } + + return o +} +func (m FloatMap) DivScalar(x float64) FloatMap { + o := FloatMap{} + for k, v := range m { + o[k] = v / x + } + + return o +} + +func (m FloatMap) Normalize() FloatMap { + sum := m.Sum() + if sum == 0 { + panic("zero sum") + } + + o := FloatMap{} + for k, v := range m { + o[k] = v / sum + } + + return o +} diff --git a/pkg/types/float_slice.go b/pkg/types/float_slice.go new file mode 100644 index 0000000000..3d53e3bc7e --- /dev/null +++ b/pkg/types/float_slice.go @@ -0,0 +1,144 @@ +package types + +import ( + "math" + + "gonum.org/v1/gonum/floats" +) + +type Float64Slice []float64 + +func (s *Float64Slice) Push(v float64) { + *s = append(*s, v) +} + +func (s 
*Float64Slice) Pop(i int64) (v float64) { + v = (*s)[i] + *s = append((*s)[:i], (*s)[i+1:]...) + return v +} + +func (s Float64Slice) Max() float64 { + return floats.Max(s) +} + +func (s Float64Slice) Min() float64 { + return floats.Min(s) +} + +func (s Float64Slice) Sum() (sum float64) { + return floats.Sum(s) +} + +func (s Float64Slice) Mean() (mean float64) { + length := len(s) + if length == 0 { + panic("zero length slice") + } + return s.Sum() / float64(length) +} + +func (s Float64Slice) Tail(size int) Float64Slice { + length := len(s) + if length <= size { + win := make(Float64Slice, length) + copy(win, s) + return win + } + + win := make(Float64Slice, size) + copy(win, s[length-size:]) + return win +} + +func (s Float64Slice) Diff() (values Float64Slice) { + for i, v := range s { + if i == 0 { + values.Push(0) + continue + } + values.Push(v - s[i-1]) + } + return values +} + +func (s Float64Slice) PositiveValuesOrZero() (values Float64Slice) { + for _, v := range s { + values.Push(math.Max(v, 0)) + } + return values +} + +func (s Float64Slice) NegativeValuesOrZero() (values Float64Slice) { + for _, v := range s { + values.Push(math.Min(v, 0)) + } + return values +} + +func (s Float64Slice) Abs() (values Float64Slice) { + for _, v := range s { + values.Push(math.Abs(v)) + } + return values +} + +func (s Float64Slice) MulScalar(x float64) (values Float64Slice) { + for _, v := range s { + values.Push(v * x) + } + return values +} + +func (s Float64Slice) DivScalar(x float64) (values Float64Slice) { + for _, v := range s { + values.Push(v / x) + } + return values +} + +func (s Float64Slice) Mul(other Float64Slice) (values Float64Slice) { + if len(s) != len(other) { + panic("slice lengths do not match") + } + + for i, v := range s { + values.Push(v * other[i]) + } + + return values +} + +func (s Float64Slice) Dot(other Float64Slice) float64 { + return floats.Dot(s, other) +} + +func (s Float64Slice) Normalize() Float64Slice { + return s.DivScalar(s.Sum()) +} + +func (a *Float64Slice) Last() float64 { + length := len(*a) + if length > 0 { + return (*a)[length-1] + } + return 0.0 +} + +func (a *Float64Slice) Index(i int) float64 { + length := len(*a) + if length-i <= 0 || i < 0 { + return 0.0 + } + return (*a)[length-i-1] +} + +func (a *Float64Slice) Length() int { + return len(*a) +} + +func (a Float64Slice) Addr() *Float64Slice { + return &a +} + +var _ Series = Float64Slice([]float64{}).Addr() diff --git a/pkg/types/fundingrate.go b/pkg/types/fundingrate.go new file mode 100644 index 0000000000..3b99bc7394 --- /dev/null +++ b/pkg/types/fundingrate.go @@ -0,0 +1,13 @@ +package types + +import ( + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type FundingRate struct { + FundingRate fixedpoint.Value + FundingTime time.Time + Time time.Time +} diff --git a/pkg/types/heikinashi_stream.go b/pkg/types/heikinashi_stream.go new file mode 100644 index 0000000000..f3cc351a09 --- /dev/null +++ b/pkg/types/heikinashi_stream.go @@ -0,0 +1,72 @@ +package types + +import ( + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +var Four fixedpoint.Value = fixedpoint.NewFromInt(4) + +type HeikinAshiStream struct { + StandardStreamEmitter + lastAshi map[string]map[Interval]*KLine + LastOrigin map[string]map[Interval]*KLine +} + +func (s *HeikinAshiStream) EmitKLineClosed(kline KLine) { + ashi := kline + if s.lastAshi == nil { + s.lastAshi = make(map[string]map[Interval]*KLine) + s.LastOrigin = make(map[string]map[Interval]*KLine) + } + if s.lastAshi[kline.Symbol] == nil { + s.lastAshi[kline.Symbol] = 
make(map[Interval]*KLine) + s.LastOrigin[kline.Symbol] = make(map[Interval]*KLine) + } + lastAshi := s.lastAshi[kline.Symbol][kline.Interval] + if lastAshi == nil { + ashi.Close = kline.Close.Add(kline.High). + Add(kline.Low). + Add(kline.Open). + Div(Four) + // High and Low are the same + s.lastAshi[kline.Symbol][kline.Interval] = &ashi + s.LastOrigin[kline.Symbol][kline.Interval] = &kline + } else { + ashi.Close = kline.Close.Add(kline.High). + Add(kline.Low). + Add(kline.Open). + Div(Four) + ashi.Open = lastAshi.Open.Add(lastAshi.Close).Div(Two) + // High and Low are the same + s.lastAshi[kline.Symbol][kline.Interval] = &ashi + s.LastOrigin[kline.Symbol][kline.Interval] = &kline + } + s.StandardStreamEmitter.EmitKLineClosed(ashi) +} + +// No writeback to lastAshi +func (s *HeikinAshiStream) EmitKLine(kline KLine) { + ashi := kline + if s.lastAshi == nil { + s.lastAshi = make(map[string]map[Interval]*KLine) + } + if s.lastAshi[kline.Symbol] == nil { + s.lastAshi[kline.Symbol] = make(map[Interval]*KLine) + } + lastAshi := s.lastAshi[kline.Symbol][kline.Interval] + if lastAshi == nil { + ashi.Close = kline.Close.Add(kline.High). + Add(kline.Low). + Add(kline.Open). + Div(Four) + } else { + ashi.Close = kline.Close.Add(kline.High). + Add(kline.Low). + Add(kline.Open). + Div(Four) + ashi.Open = lastAshi.Open.Add(lastAshi.Close).Div(Two) + } + s.StandardStreamEmitter.EmitKLine(ashi) +} + +var _ StandardStreamEmitter = &HeikinAshiStream{} diff --git a/pkg/types/indicator.go b/pkg/types/indicator.go new file mode 100644 index 0000000000..286e1493c8 --- /dev/null +++ b/pkg/types/indicator.go @@ -0,0 +1,568 @@ +package types + +import ( + "fmt" + "math" + "reflect" + + "gonum.org/v1/gonum/stat" +) + +// Queue is a super basic Series type that simply holds the float64 data +// with a size limit (the only difference compared to Float64Slice) +type Queue struct { + arr []float64 + size int +} + +func NewQueue(size int) *Queue { + return &Queue{ + arr: make([]float64, 0, size), + size: size, + } +} + +func (inc *Queue) Last() float64 { + if len(inc.arr) == 0 { + return 0 + } + return inc.arr[len(inc.arr)-1] +} + +func (inc *Queue) Index(i int) float64 { + if len(inc.arr)-i-1 < 0 { + return 0 + } + return inc.arr[len(inc.arr)-i-1] +} + +func (inc *Queue) Length() int { + return len(inc.arr) +} + +func (inc *Queue) Update(v float64) { + inc.arr = append(inc.arr, v) + if len(inc.arr) > inc.size { + inc.arr = inc.arr[len(inc.arr)-inc.size:] + } +} + +var _ Series = &Queue{} + +// Float64Indicator is the interface for indicators (such as SMA and EWMA) that return float64 data. 
+type Float64Indicator interface { + Last() float64 +} + +// The interface maps to pinescript basic type `series` +// Access the internal historical data from the latest to the oldest +// Index(0) always maps to Last() +type Series interface { + Last() float64 + Index(int) float64 + Length() int +} + +type UpdatableSeries interface { + Series + Update(float64) +} + +// The interface maps to pinescript basic type `series` for bool type +// Access the internal historical data from the latest to the oldest +// Index(0) always maps to Last() +type BoolSeries interface { + Last() bool + Index(int) bool + Length() int +} + +// Calculate sum of the series +// if limit is given, will only sum first limit numbers (a.Index[0..limit]) +// otherwise will sum all elements +func Sum(a Series, limit ...int) (sum float64) { + l := -1 + if len(limit) > 0 { + l = limit[0] + } + if l < a.Length() { + l = a.Length() + } + for i := 0; i < l; i++ { + sum += a.Index(i) + } + return sum +} + +// Calculate the average value of the series +// if limit is given, will only calculate the average of first limit numbers (a.Index[0..limit]) +// otherwise will operate on all elements +func Mean(a Series, limit ...int) (mean float64) { + l := -1 + if len(limit) > 0 { + l = limit[0] + } + if l < a.Length() { + l = a.Length() + } + return Sum(a, l) / float64(l) +} + +type AbsResult struct { + a Series +} + +func (a *AbsResult) Last() float64 { + return math.Abs(a.a.Last()) +} + +func (a *AbsResult) Index(i int) float64 { + return math.Abs(a.a.Index(i)) +} + +func (a *AbsResult) Length() int { + return a.a.Length() +} + +// Return series that having all the elements positive +func Abs(a Series) Series { + return &AbsResult{a} +} + +var _ Series = &AbsResult{} + +func Predict(a Series, lookback int, offset ...int) float64 { + if a.Length() < lookback { + lookback = a.Length() + } + x := make([]float64, lookback) + y := make([]float64, lookback) + var weights []float64 + for i := 0; i < lookback; i++ { + x[i] = float64(i) + y[i] = a.Index(i) + } + alpha, beta := stat.LinearRegression(x, y, weights, false) + o := -1.0 + if len(offset) > 0 { + o = -float64(offset[0]) + } + return alpha + beta*o +} + +// This will make prediction using Linear Regression to get the next cross point +// Return (offset from latest, crossed value, could cross) +// offset from latest should always be positive +// lookback param is to use at most `lookback` points to determine linear regression functions +// +// You may also refer to excel's FORECAST function +func NextCross(a Series, b Series, lookback int) (int, float64, bool) { + if a.Length() < lookback { + lookback = a.Length() + } + if b.Length() < lookback { + lookback = b.Length() + } + x := make([]float64, lookback) + y1 := make([]float64, lookback) + y2 := make([]float64, lookback) + var weights []float64 + for i := 0; i < lookback; i++ { + x[i] = float64(i) + y1[i] = a.Index(i) + y2[i] = b.Index(i) + } + alpha1, beta1 := stat.LinearRegression(x, y1, weights, false) + alpha2, beta2 := stat.LinearRegression(x, y2, weights, false) + if beta2 == beta1 { + return 0, 0, false + } + indexf := (alpha1 - alpha2) / (beta2 - beta1) + + // crossed in different direction + if indexf >= 0 { + return 0, 0, false + } + return int(math.Ceil(-indexf)), alpha1 + beta1*indexf, true +} + +// The result structure that maps to the crossing result of `CrossOver` and `CrossUnder` +// Accessible through BoolSeries interface +type CrossResult struct { + a Series + b Series + isOver bool +} + +func (c *CrossResult) 
Last() bool { + if c.Length() == 0 { + return false + } + if c.isOver { + return c.a.Last()-c.b.Last() > 0 && c.a.Index(1)-c.b.Index(1) < 0 + } else { + return c.a.Last()-c.b.Last() < 0 && c.a.Index(1)-c.b.Index(1) > 0 + } +} + +func (c *CrossResult) Index(i int) bool { + if i >= c.Length() { + return false + } + if c.isOver { + return c.a.Index(i)-c.b.Index(i) > 0 && c.a.Index(i+1)-c.b.Index(i+1) < 0 + } else { + return c.a.Index(i)-c.b.Index(i) < 0 && c.a.Index(i+1)-c.b.Index(i+1) > 0 + } +} + +func (c *CrossResult) Length() int { + la := c.a.Length() + lb := c.b.Length() + if la > lb { + return lb + } + return la +} + +// a series cross above b series. +// If in current KLine, a is higher than b, and in previous KLine, a is lower than b, then return true. +// Otherwise return false. +// If accessing index <= length, will always return false +func CrossOver(a Series, b Series) BoolSeries { + return &CrossResult{a, b, true} +} + +// a series cross under b series. +// If in current KLine, a is lower than b, and in previous KLine, a is higher than b, then return true. +// Otherwise return false. +// If accessing index <= length, will always return false +func CrossUnder(a Series, b Series) BoolSeries { + return &CrossResult{a, b, false} +} + +func Highest(a Series, lookback int) float64 { + if lookback > a.Length() { + lookback = a.Length() + } + highest := a.Last() + for i := 1; i < lookback; i++ { + current := a.Index(i) + if highest < current { + highest = current + } + } + return highest +} + +func Lowest(a Series, lookback int) float64 { + if lookback > a.Length() { + lookback = a.Length() + } + lowest := a.Last() + for i := 1; i < lookback; i++ { + current := a.Index(i) + if lowest > current { + lowest = current + } + } + return lowest +} + +type NumberSeries float64 + +func (a NumberSeries) Last() float64 { + return float64(a) +} + +func (a NumberSeries) Index(_ int) float64 { + return float64(a) +} + +func (a NumberSeries) Length() int { + return math.MaxInt32 +} + +var _ Series = NumberSeries(0) + +type AddSeriesResult struct { + a Series + b Series +} + +// Add two series, result[i] = a[i] + b[i] +func Add(a interface{}, b interface{}) Series { + var aa Series + var bb Series + + switch tp := a.(type) { + case float64: + aa = NumberSeries(tp) + case Series: + aa = tp + default: + panic("input should be either *Series or float64") + + } + switch tp := b.(type) { + case float64: + bb = NumberSeries(tp) + case Series: + bb = tp + default: + panic("input should be either *Series or float64") + + } + return &AddSeriesResult{aa, bb} +} + +func (a *AddSeriesResult) Last() float64 { + return a.a.Last() + a.b.Last() +} + +func (a *AddSeriesResult) Index(i int) float64 { + return a.a.Index(i) + a.b.Index(i) +} + +func (a *AddSeriesResult) Length() int { + lengtha := a.a.Length() + lengthb := a.b.Length() + if lengtha < lengthb { + return lengtha + } + return lengthb +} + +var _ Series = &AddSeriesResult{} + +type MinusSeriesResult struct { + a Series + b Series +} + +// Minus two series, result[i] = a[i] - b[i] +func Minus(a interface{}, b interface{}) Series { + aa := switchIface(a) + bb := switchIface(b) + return &MinusSeriesResult{aa, bb} +} + +func (a *MinusSeriesResult) Last() float64 { + return a.a.Last() - a.b.Last() +} + +func (a *MinusSeriesResult) Index(i int) float64 { + return a.a.Index(i) - a.b.Index(i) +} + +func (a *MinusSeriesResult) Length() int { + lengtha := a.a.Length() + lengthb := a.b.Length() + if lengtha < lengthb { + return lengtha + } + return lengthb +} + +var _ 
Series = &MinusSeriesResult{} + +func switchIface(b interface{}) Series { + switch tp := b.(type) { + case float64: + return NumberSeries(tp) + case int32: + return NumberSeries(float64(tp)) + case int64: + return NumberSeries(float64(tp)) + case float32: + return NumberSeries(float64(tp)) + case int: + return NumberSeries(float64(tp)) + case Series: + return tp + default: + fmt.Println(reflect.TypeOf(b)) + panic("input should be either *Series or float64") + + } +} + +// Divide two series, result[i] = a[i] / b[i] +func Div(a interface{}, b interface{}) Series { + aa := switchIface(a) + if 0 == b { + panic("Divide by zero exception") + } + bb := switchIface(b) + return &DivSeriesResult{aa, bb} + +} + +type DivSeriesResult struct { + a Series + b Series +} + +func (a *DivSeriesResult) Last() float64 { + return a.a.Last() / a.b.Last() +} + +func (a *DivSeriesResult) Index(i int) float64 { + return a.a.Index(i) / a.b.Index(i) +} + +func (a *DivSeriesResult) Length() int { + lengtha := a.a.Length() + lengthb := a.b.Length() + if lengtha < lengthb { + return lengtha + } + return lengthb +} + +var _ Series = &DivSeriesResult{} + +// Multiply two series, result[i] = a[i] * b[i] +func Mul(a interface{}, b interface{}) Series { + var aa Series + var bb Series + + switch tp := a.(type) { + case float64: + aa = NumberSeries(tp) + case Series: + aa = tp + default: + panic("input should be either Series or float64") + } + switch tp := b.(type) { + case float64: + bb = NumberSeries(tp) + case Series: + bb = tp + default: + panic("input should be either Series or float64") + + } + return &MulSeriesResult{aa, bb} + +} + +type MulSeriesResult struct { + a Series + b Series +} + +func (a *MulSeriesResult) Last() float64 { + return a.a.Last() * a.b.Last() +} + +func (a *MulSeriesResult) Index(i int) float64 { + return a.a.Index(i) * a.b.Index(i) +} + +func (a *MulSeriesResult) Length() int { + lengtha := a.a.Length() + lengthb := a.b.Length() + if lengtha < lengthb { + return lengtha + } + return lengthb +} + +var _ Series = &MulSeriesResult{} + +// Calculate (a dot b). + // if limit is given, will only calculate the first limit numbers (a.Index[0..limit]) +// otherwise will operate on all elements +func Dot(a interface{}, b interface{}, limit ...int) float64 { + return Sum(Mul(a, b), limit...) +} + +// Extract elements from the Series to a float64 array, following the order of Index(0..limit) +// if limit is given, will only take the first limit numbers (a.Index[0..limit]) +// otherwise will operate on all elements +func ToArray(a Series, limit ...int) (result []float64) { + l := -1 + if len(limit) > 0 { + l = limit[0] + } + if l < a.Length() { + l = a.Length() + } + result = make([]float64, l) + for i := 0; i < l; i++ { + result[i] = a.Index(i) + } + return +} + +// Similar to ToArray but in reverse order. 
+// Useful when you want to cache series' calculated result as float64 array +// the then reuse the result in multiple places (so that no recalculation will be triggered) +// +// notice that the return type is a Float64Slice, which implements the Series interface +func ToReverseArray(a Series, limit ...int) (result Float64Slice) { + l := -1 + if len(limit) > 0 { + l = limit[0] + } + if l < a.Length() { + l = a.Length() + } + result = make([]float64, l) + for i := 0; i < l; i++ { + result[l-i-1] = a.Index(i) + } + return +} + +type ChangeResult struct { + a Series + offset int +} + +func (c *ChangeResult) Last() float64 { + if c.offset >= c.a.Length() { + return 0 + } + return c.a.Last() - c.a.Index(c.offset) +} + +func (c *ChangeResult) Index(i int) float64 { + if i+c.offset >= c.a.Length() { + return 0 + } + return c.a.Index(i) - c.a.Index(i+c.offset) +} + +func (c *ChangeResult) Length() int { + length := c.a.Length() + if length >= c.offset { + return length - c.offset + } + return 0 +} + +// Difference between current value and previous, a - a[offset] +// offset: if not given, offset is 1. +func Change(a Series, offset ...int) Series { + o := 1 + if len(offset) > 0 { + o = offset[0] + } + + return &ChangeResult{a, o} +} + +func Stdev(a Series, length int) float64 { + avg := Mean(a, length) + s := .0 + for i := 0; i < length; i++ { + diff := a.Index(i) - avg + s += diff * diff + } + return math.Sqrt(s / float64(length)) +} + +// TODO: ta.linreg diff --git a/pkg/types/indicator_test.go b/pkg/types/indicator_test.go new file mode 100644 index 0000000000..8919c78661 --- /dev/null +++ b/pkg/types/indicator_test.go @@ -0,0 +1,35 @@ +package types + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestFloat(t *testing.T) { + var a Series = Minus(3., 2.) + assert.Equal(t, a.Last(), 1.) + assert.Equal(t, a.Index(100), 1.) +} + +func TestNextCross(t *testing.T) { + var a Series = NumberSeries(1.2) + + var b Series = &Float64Slice{100., 80., 60.} + // index 2 1 0 + // predicted 40 20 0 + // offset 1 2 3 + + index, value, ok := NextCross(a, b, 3) + assert.True(t, ok) + assert.Equal(t, value, 1.2) + assert.Equal(t, index, 3) // 2.94, ceil +} + +func TestFloat64Slice(t *testing.T) { + var a = Float64Slice{1.0, 2.0, 3.0} + var b = Float64Slice{1.0, 2.0, 3.0} + var c Series = Minus(&a, &b) + a = append(a, 4.0) + b = append(b, 3.0) + assert.Equal(t, c.Last(), 1.) 
+} diff --git a/pkg/types/interval.go b/pkg/types/interval.go index caaf14be3c..a06e083e57 100644 --- a/pkg/types/interval.go +++ b/pkg/types/interval.go @@ -69,10 +69,15 @@ var SupportedIntervals = map[Interval]int{ // IntervalWindow is used by the indicators type IntervalWindow struct { // The interval of kline - Interval Interval + Interval Interval `json:"interval"` // The windows size of the indicator (EWMA and SMA) - Window int + Window int `json:"window"` +} + +type IntervalWindowBandWidth struct { + IntervalWindow + BandWidth float64 `json:"bandWidth"` } func (iw IntervalWindow) String() string { diff --git a/pkg/types/kline.go b/pkg/types/kline.go index b45bb67d0c..6a92ffdf39 100644 --- a/pkg/types/kline.go +++ b/pkg/types/kline.go @@ -2,11 +2,11 @@ package types import ( "fmt" - "math" "time" "github.com/slack-go/slack" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/util" ) @@ -16,23 +16,25 @@ const DirectionUp = 1 const DirectionNone = 0 const DirectionDown = -1 +var Two = fixedpoint.NewFromInt(2) + type KLineOrWindow interface { GetInterval() string Direction() Direction - GetChange() float64 - GetMaxChange() float64 - GetThickness() float64 + GetChange() fixedpoint.Value + GetMaxChange() fixedpoint.Value + GetThickness() fixedpoint.Value - Mid() float64 - GetOpen() float64 - GetClose() float64 - GetHigh() float64 - GetLow() float64 + Mid() fixedpoint.Value + GetOpen() fixedpoint.Value + GetClose() fixedpoint.Value + GetHigh() fixedpoint.Value + GetLow() fixedpoint.Value BounceUp() bool BounceDown() bool - GetUpperShadowRatio() float64 - GetLowerShadowRatio() float64 + GetUpperShadowRatio() fixedpoint.Value + GetLowerShadowRatio() fixedpoint.Value SlackAttachment() slack.Attachment } @@ -45,33 +47,35 @@ type KLineQueryOptions struct { // KLine uses binance's kline as the standard structure type KLine struct { - GID uint64 `json:"gid" db:"gid"` - Exchange string `json:"exchange" db:"exchange"` + GID uint64 `json:"gid" db:"gid"` + Exchange ExchangeName `json:"exchange" db:"exchange"` Symbol string `json:"symbol" db:"symbol"` - StartTime time.Time `json:"startTime" db:"start_time"` - EndTime time.Time `json:"endTime" db:"end_time"` + StartTime Time `json:"startTime" db:"start_time"` + EndTime Time `json:"endTime" db:"end_time"` Interval Interval `json:"interval" db:"interval"` - Open float64 `json:"open" db:"open"` - Close float64 `json:"close" db:"close"` - High float64 `json:"high" db:"high"` - Low float64 `json:"low" db:"low"` - Volume float64 `json:"volume" db:"volume"` - QuoteVolume float64 `json:"quoteVolume" db:"quote_volume"` + Open fixedpoint.Value `json:"open" db:"open"` + Close fixedpoint.Value `json:"close" db:"close"` + High fixedpoint.Value `json:"high" db:"high"` + Low fixedpoint.Value `json:"low" db:"low"` + Volume fixedpoint.Value `json:"volume" db:"volume"` + QuoteVolume fixedpoint.Value `json:"quoteVolume" db:"quote_volume"` + TakerBuyBaseAssetVolume fixedpoint.Value `json:"takerBuyBaseAssetVolume" db:"taker_buy_base_volume"` + TakerBuyQuoteAssetVolume fixedpoint.Value `json:"takerBuyQuoteAssetVolume" db:"taker_buy_quote_volume"` LastTradeID uint64 `json:"lastTradeID" db:"last_trade_id"` NumberOfTrades uint64 `json:"numberOfTrades" db:"num_trades"` Closed bool `json:"closed" db:"closed"` } -func (k KLine) GetStartTime() time.Time { +func (k KLine) GetStartTime() Time { return k.StartTime } -func (k KLine) GetEndTime() time.Time { +func (k KLine) GetEndTime() Time { return k.EndTime } @@ -79,109 +83,133 @@ func (k KLine) GetInterval() Interval { 
return k.Interval } -func (k KLine) Mid() float64 { - return (k.High + k.Low) / 2 +func (k KLine) Mid() fixedpoint.Value { + return k.High.Add(k.Low).Div(Two) } // green candle with open and close near high price func (k KLine) BounceUp() bool { mid := k.Mid() trend := k.Direction() - return trend > 0 && k.Open > mid && k.Close > mid + return trend > 0 && k.Open.Compare(mid) > 0 && k.Close.Compare(mid) > 0 } // red candle with open and close near low price func (k KLine) BounceDown() bool { mid := k.Mid() trend := k.Direction() - return trend > 0 && k.Open < mid && k.Close < mid + return trend > 0 && k.Open.Compare(mid) < 0 && k.Close.Compare(mid) < 0 } func (k KLine) Direction() Direction { o := k.GetOpen() c := k.GetClose() - if c > o { + if c.Compare(o) > 0 { return DirectionUp - } else if c < o { + } else if c.Compare(o) < 0 { return DirectionDown } return DirectionNone } -func (k KLine) GetHigh() float64 { +func (k KLine) GetHigh() fixedpoint.Value { return k.High } -func (k KLine) GetLow() float64 { +func (k KLine) GetLow() fixedpoint.Value { return k.Low } -func (k KLine) GetOpen() float64 { +func (k KLine) GetOpen() fixedpoint.Value { return k.Open } -func (k KLine) GetClose() float64 { +func (k KLine) GetClose() fixedpoint.Value { return k.Close } -func (k KLine) GetMaxChange() float64 { - return k.GetHigh() - k.GetLow() +func (k KLine) GetMaxChange() fixedpoint.Value { + return k.GetHigh().Sub(k.GetLow()) +} + +func (k KLine) GetAmplification() fixedpoint.Value { + return k.GetMaxChange().Div(k.GetLow()) } // GetThickness returns the thickness of the kline. 1 => thick, 0.1 => thin -func (k KLine) GetThickness() float64 { - return math.Abs(k.GetChange()) / math.Abs(k.GetMaxChange()) +func (k KLine) GetThickness() fixedpoint.Value { + out := k.GetChange().Div(k.GetMaxChange()) + if out.Sign() < 0 { + return out.Neg() + } + return out } -func (k KLine) GetUpperShadowRatio() float64 { - return k.GetUpperShadowHeight() / math.Abs(k.GetMaxChange()) +func (k KLine) GetUpperShadowRatio() fixedpoint.Value { + out := k.GetUpperShadowHeight().Div(k.GetMaxChange()) + if out.Sign() < 0 { + return out.Neg() + } + return out } -func (k KLine) GetUpperShadowHeight() float64 { +func (k KLine) GetUpperShadowHeight() fixedpoint.Value { high := k.GetHigh() - if k.GetOpen() > k.GetClose() { - return high - k.GetOpen() + open := k.GetOpen() + clos := k.GetClose() + if open.Compare(clos) > 0 { + return high.Sub(open) } - return high - k.GetClose() + return high.Sub(clos) } -func (k KLine) GetLowerShadowRatio() float64 { - return k.GetLowerShadowHeight() / math.Abs(k.GetMaxChange()) +func (k KLine) GetLowerShadowRatio() fixedpoint.Value { + out := k.GetLowerShadowHeight().Div(k.GetMaxChange()) + if out.Sign() < 0 { + return out.Neg() + } + return out } -func (k KLine) GetLowerShadowHeight() float64 { +func (k KLine) GetLowerShadowHeight() fixedpoint.Value { low := k.Low - if k.Open < k.Close { - return k.Open - low + if k.Open.Compare(k.Close) < 0 { // uptrend + return k.Open.Sub(low) } - return k.Close - low + + // downtrend + return k.Close.Sub(low) } // GetBody returns the height of the candle real body -func (k KLine) GetBody() float64 { +func (k KLine) GetBody() fixedpoint.Value { return k.GetChange() } // GetChange returns Close price - Open price. 
-func (k KLine) GetChange() float64 { - return k.Close - k.Open -} - -func (k KLine) String() string { - return fmt.Sprintf("%s %s %s %s Open: %.8f Close: %.8f High: %.8f Low: %.8f Volume: %.8f Change: %.4f Max Change: %.4f", - k.Exchange, - k.StartTime.Format("2006-01-02 15:04"), - k.Symbol, k.Interval, k.Open, k.Close, k.High, k.Low, k.Volume, k.GetChange(), k.GetMaxChange()) +func (k KLine) GetChange() fixedpoint.Value { + return k.Close.Sub(k.Open) } func (k KLine) Color() string { if k.Direction() > 0 { - return Green + return GreenColor } else if k.Direction() < 0 { - return Red + return RedColor } - return "#f0f0f0" + return GrayColor +} + +func (k KLine) String() string { + return fmt.Sprintf("%s %s %s %s O: %.4f H: %.4f L: %.4f C: %.4f CHG: %.4f MAXCHG: %.4f V: %.4f QV: %.2f TBBV: %.2f", + k.Exchange.String(), + k.StartTime.Time().Format("2006-01-02 15:04"), + k.Symbol, k.Interval, k.Open.Float64(), k.High.Float64(), k.Low.Float64(), k.Close.Float64(), k.GetChange().Float64(), k.GetMaxChange().Float64(), k.Volume.Float64(), k.QuoteVolume.Float64(), k.TakerBuyBaseAssetVolume.Float64()) +} + +func (k KLine) PlainText() string { + return k.String() } func (k KLine) SlackAttachment() slack.Attachment { @@ -189,26 +217,29 @@ func (k KLine) SlackAttachment() slack.Attachment { Text: fmt.Sprintf("*%s* KLine %s", k.Symbol, k.Interval), Color: k.Color(), Fields: []slack.AttachmentField{ - {Title: "Open", Value: util.FormatFloat(k.Open, 2), Short: true}, - {Title: "High", Value: util.FormatFloat(k.High, 2), Short: true}, - {Title: "Low", Value: util.FormatFloat(k.Low, 2), Short: true}, - {Title: "Close", Value: util.FormatFloat(k.Close, 2), Short: true}, - {Title: "Mid", Value: util.FormatFloat(k.Mid(), 2), Short: true}, - {Title: "Change", Value: util.FormatFloat(k.GetChange(), 2), Short: true}, - {Title: "Max Change", Value: util.FormatFloat(k.GetMaxChange(), 2), Short: true}, + {Title: "Open", Value: util.FormatValue(k.Open, 2), Short: true}, + {Title: "High", Value: util.FormatValue(k.High, 2), Short: true}, + {Title: "Low", Value: util.FormatValue(k.Low, 2), Short: true}, + {Title: "Close", Value: util.FormatValue(k.Close, 2), Short: true}, + {Title: "Mid", Value: util.FormatValue(k.Mid(), 2), Short: true}, + {Title: "Change", Value: util.FormatValue(k.GetChange(), 2), Short: true}, + {Title: "Volume", Value: util.FormatValue(k.Volume, 2), Short: true}, + {Title: "Taker Buy Base Volume", Value: util.FormatValue(k.TakerBuyBaseAssetVolume, 2), Short: true}, + {Title: "Taker Buy Quote Volume", Value: util.FormatValue(k.TakerBuyQuoteAssetVolume, 2), Short: true}, + {Title: "Max Change", Value: util.FormatValue(k.GetMaxChange(), 2), Short: true}, { Title: "Thickness", - Value: util.FormatFloat(k.GetThickness(), 4), + Value: util.FormatValue(k.GetThickness(), 4), Short: true, }, { Title: "UpperShadowRatio", - Value: util.FormatFloat(k.GetUpperShadowRatio(), 4), + Value: util.FormatValue(k.GetUpperShadowRatio(), 4), Short: true, }, { Title: "LowerShadowRatio", - Value: util.FormatFloat(k.GetLowerShadowRatio(), 4), + Value: util.FormatValue(k.GetLowerShadowRatio(), 4), Short: true, }, }, @@ -220,11 +251,11 @@ func (k KLine) SlackAttachment() slack.Attachment { type KLineWindow []KLine // ReduceClose reduces the closed prices -func (k KLineWindow) ReduceClose() float64 { - s := 0.0 +func (k KLineWindow) ReduceClose() fixedpoint.Value { + s := fixedpoint.Zero for _, kline := range k { - s += kline.GetClose() + s = s.Add(kline.GetClose()) } return s @@ -246,39 +277,43 @@ func (k KLineWindow) 
GetInterval() Interval { return k.First().Interval } -func (k KLineWindow) GetOpen() float64 { +func (k KLineWindow) GetOpen() fixedpoint.Value { return k.First().GetOpen() } -func (k KLineWindow) GetClose() float64 { +func (k KLineWindow) GetClose() fixedpoint.Value { end := len(k) - 1 return k[end].GetClose() } -func (k KLineWindow) GetHigh() float64 { - high := k.GetOpen() +func (k KLineWindow) GetHigh() fixedpoint.Value { + high := k.First().GetHigh() for _, line := range k { - high = math.Max(high, line.GetHigh()) + high = fixedpoint.Max(high, line.GetHigh()) } return high } -func (k KLineWindow) GetLow() float64 { - low := k.GetOpen() +func (k KLineWindow) GetLow() fixedpoint.Value { + low := k.First().GetLow() for _, line := range k { - low = math.Min(low, line.GetLow()) + low = fixedpoint.Min(low, line.GetLow()) } return low } -func (k KLineWindow) GetChange() float64 { - return k.GetClose() - k.GetOpen() +func (k KLineWindow) GetChange() fixedpoint.Value { + return k.GetClose().Sub(k.GetOpen()) +} + +func (k KLineWindow) GetMaxChange() fixedpoint.Value { + return k.GetHigh().Sub(k.GetLow()) } -func (k KLineWindow) GetMaxChange() float64 { - return k.GetHigh() - k.GetLow() +func (k KLineWindow) GetAmplification() fixedpoint.Value { + return k.GetMaxChange().Div(k.GetLow()) } func (k KLineWindow) AllDrop() bool { @@ -303,9 +338,9 @@ func (k KLineWindow) GetTrend() int { o := k.GetOpen() c := k.GetClose() - if c > o { + if c.Compare(o) > 0 { return 1 - } else if c < o { + } else if c.Compare(o) < 0 { return -1 } return 0 @@ -313,29 +348,30 @@ func (k KLineWindow) GetTrend() int { func (k KLineWindow) Color() string { if k.GetTrend() > 0 { - return Green + return GreenColor } else if k.GetTrend() < 0 { - return Red + return RedColor } - return "#f0f0f0" + return GrayColor } -func (k KLineWindow) Mid() float64 { - return k.GetHigh() - k.GetLow()/2 +// Mid price +func (k KLineWindow) Mid() fixedpoint.Value { + return k.GetHigh().Add(k.GetLow()).Div(Two) } -// green candle with open and close near high price +// BounceUp returns true if it's green candle with open and close near high price func (k KLineWindow) BounceUp() bool { mid := k.Mid() trend := k.GetTrend() - return trend > 0 && k.GetOpen() > mid && k.GetClose() > mid + return trend > 0 && k.GetOpen().Compare(mid) > 0 && k.GetClose().Compare(mid) > 0 } -// red candle with open and close near low price +// BounceDown returns true red candle with open and close near low price func (k KLineWindow) BounceDown() bool { mid := k.Mid() trend := k.GetTrend() - return trend > 0 && k.GetOpen() < mid && k.GetClose() < mid + return trend > 0 && k.GetOpen().Compare(mid) < 0 && k.GetClose().Compare(mid) < 0 } func (k *KLineWindow) Add(line KLine) { @@ -355,7 +391,7 @@ func (k KLineWindow) Tail(size int) KLineWindow { } win := make(KLineWindow, size) - copy(win, k[length-1-size:]) + copy(win, k[length-size:]) return win } @@ -374,36 +410,52 @@ func (k *KLineWindow) Truncate(size int) { *k = kn } -func (k KLineWindow) GetBody() float64 { +func (k KLineWindow) GetBody() fixedpoint.Value { return k.GetChange() } -func (k KLineWindow) GetThickness() float64 { - return math.Abs(k.GetChange()) / math.Abs(k.GetMaxChange()) +func (k KLineWindow) GetThickness() fixedpoint.Value { + out := k.GetChange().Div(k.GetMaxChange()) + if out.Sign() < 0 { + return out.Neg() + } + return out } -func (k KLineWindow) GetUpperShadowRatio() float64 { - return k.GetUpperShadowHeight() / math.Abs(k.GetMaxChange()) +func (k KLineWindow) GetUpperShadowRatio() 
fixedpoint.Value { + out := k.GetUpperShadowHeight().Div(k.GetMaxChange()) + if out.Sign() < 0 { + return out.Neg() + } + return out } -func (k KLineWindow) GetUpperShadowHeight() float64 { +func (k KLineWindow) GetUpperShadowHeight() fixedpoint.Value { high := k.GetHigh() - if k.GetOpen() > k.GetClose() { - return high - k.GetOpen() + open := k.GetOpen() + clos := k.GetClose() + if open.Compare(clos) > 0 { + return high.Sub(open) } - return high - k.GetClose() + return high.Sub(clos) } -func (k KLineWindow) GetLowerShadowRatio() float64 { - return k.GetLowerShadowHeight() / math.Abs(k.GetMaxChange()) +func (k KLineWindow) GetLowerShadowRatio() fixedpoint.Value { + out := k.GetLowerShadowHeight().Div(k.GetMaxChange()) + if out.Sign() < 0 { + return out.Neg() + } + return out } -func (k KLineWindow) GetLowerShadowHeight() float64 { +func (k KLineWindow) GetLowerShadowHeight() fixedpoint.Value { low := k.GetLow() - if k.GetOpen() < k.GetClose() { - return k.GetOpen() - low + open := k.GetOpen() + clos := k.GetClose() + if open.Compare(clos) < 0 { + return open.Sub(low) } - return k.GetClose() - low + return clos.Sub(low) } func (k KLineWindow) SlackAttachment() slack.Attachment { @@ -419,34 +471,34 @@ func (k KLineWindow) SlackAttachment() slack.Attachment { Text: fmt.Sprintf("*%s* KLineWindow %s x %d", first.Symbol, first.Interval, windowSize), Color: k.Color(), Fields: []slack.AttachmentField{ - {Title: "Open", Value: util.FormatFloat(k.GetOpen(), 2), Short: true}, - {Title: "High", Value: util.FormatFloat(k.GetHigh(), 2), Short: true}, - {Title: "Low", Value: util.FormatFloat(k.GetLow(), 2), Short: true}, - {Title: "Close", Value: util.FormatFloat(k.GetClose(), 2), Short: true}, - {Title: "Mid", Value: util.FormatFloat(k.Mid(), 2), Short: true}, + {Title: "Open", Value: util.FormatValue(k.GetOpen(), 2), Short: true}, + {Title: "High", Value: util.FormatValue(k.GetHigh(), 2), Short: true}, + {Title: "Low", Value: util.FormatValue(k.GetLow(), 2), Short: true}, + {Title: "Close", Value: util.FormatValue(k.GetClose(), 2), Short: true}, + {Title: "Mid", Value: util.FormatValue(k.Mid(), 2), Short: true}, { Title: "Change", - Value: util.FormatFloat(k.GetChange(), 2), + Value: util.FormatValue(k.GetChange(), 2), Short: true, }, { Title: "Max Change", - Value: util.FormatFloat(k.GetMaxChange(), 2), + Value: util.FormatValue(k.GetMaxChange(), 2), Short: true, }, { Title: "Thickness", - Value: util.FormatFloat(k.GetThickness(), 4), + Value: util.FormatValue(k.GetThickness(), 4), Short: true, }, { Title: "UpperShadowRatio", - Value: util.FormatFloat(k.GetUpperShadowRatio(), 4), + Value: util.FormatValue(k.GetUpperShadowRatio(), 4), Short: true, }, { Title: "LowerShadowRatio", - Value: util.FormatFloat(k.GetLowerShadowRatio(), 4), + Value: util.FormatValue(k.GetLowerShadowRatio(), 4), Short: true, }, }, @@ -456,3 +508,99 @@ func (k KLineWindow) SlackAttachment() slack.Attachment { } type KLineCallback func(kline KLine) + +type KValueType int + +const ( + kOpUnknown KValueType = iota + kOpenValue + kCloseValue + kHighValue + kLowValue + kVolumeValue +) + +func (k *KLineWindow) High() Series { + return &KLineSeries{ + lines: k, + kv: kHighValue, + } +} + +func (k *KLineWindow) Low() Series { + return &KLineSeries{ + lines: k, + kv: kLowValue, + } +} + +func (k *KLineWindow) Open() Series { + return &KLineSeries{ + lines: k, + kv: kOpenValue, + } +} + +func (k *KLineWindow) Close() Series { + return &KLineSeries{ + lines: k, + kv: kCloseValue, + } +} + +func (k *KLineWindow) Volume() Series { + return 
&KLineSeries{ + lines: k, + kv: kVolumeValue, + } +} + +type KLineSeries struct { + lines *KLineWindow + kv KValueType +} + +func (k *KLineSeries) Last() float64 { + length := len(*k.lines) + switch k.kv { + case kOpUnknown: + panic("kline series operator unknown") + case kOpenValue: + return (*k.lines)[length-1].GetOpen().Float64() + case kCloseValue: + return (*k.lines)[length-1].GetClose().Float64() + case kLowValue: + return (*k.lines)[length-1].GetLow().Float64() + case kHighValue: + return (*k.lines)[length-1].GetHigh().Float64() + case kVolumeValue: + return (*k.lines)[length-1].Volume.Float64() + } + return 0 +} + +func (k *KLineSeries) Index(i int) float64 { + length := len(*k.lines) + if length == 0 || length-i-1 < 0 { + return 0 + } + switch k.kv { + case kOpenValue: + return (*k.lines)[length-i-1].GetOpen().Float64() + case kCloseValue: + return (*k.lines)[length-i-1].GetClose().Float64() + case kLowValue: + return (*k.lines)[length-i-1].GetLow().Float64() + case kHighValue: + return (*k.lines)[length-i-1].GetHigh().Float64() + case kVolumeValue: + return (*k.lines)[length-i-1].Volume.Float64() + } + return 0 +} + +func (k *KLineSeries) Length() int { + return len(*k.lines) +} + +var _ Series = &KLineSeries{} diff --git a/pkg/types/kline_test.go b/pkg/types/kline_test.go index 3e7af5f07f..f125f39d9c 100644 --- a/pkg/types/kline_test.go +++ b/pkg/types/kline_test.go @@ -1,44 +1,58 @@ package types import ( - "testing" - + "encoding/json" "github.com/stretchr/testify/assert" + "testing" ) func TestKLineWindow_Tail(t *testing.T) { - var win = KLineWindow{ + var jsonWin = []byte(`[ + {"open": 11600.0, "close": 11600.0, "high": 11600.0, "low": 11600.0}, + {"open": 11700.0, "close": 11700.0, "high": 11700.0, "low": 11700.0} + ]`) + var win KLineWindow + err := json.Unmarshal(jsonWin, &win) + assert.NoError(t, err) + + /*{ {Open: 11600.0, Close: 11600.0, High: 11600.0, Low: 11600.0}, - {Open: 11600.0, Close: 11600.0, High: 11600.0, Low: 11600.0}, - } + {Open: 11700.0, Close: 11700.0, High: 11700.0, Low: 11700.0}, + }*/ var win2 = win.Tail(1) assert.Len(t, win2, 1) + assert.ElementsMatch(t, win2, win[1:]) var win3 = win.Tail(2) assert.Len(t, win3, 2) + assert.ElementsMatch(t, win3, win) var win4 = win.Tail(3) assert.Len(t, win4, 2) + assert.ElementsMatch(t, win4, win) } func TestKLineWindow_Truncate(t *testing.T) { - var win = KLineWindow{ - {Open: 11600.0, Close: 11600.0, High: 11600.0, Low: 11600.0}, - {Open: 11601.0, Close: 11600.0, High: 11600.0, Low: 11600.0}, - {Open: 11602.0, Close: 11600.0, High: 11600.0, Low: 11600.0}, - {Open: 11603.0, Close: 11600.0, High: 11600.0, Low: 11600.0}, - } + var jsonWin = []byte(`[ + {"open": 11600.0, "close": 11600.0, "high": 11600.0, "low": 11600.0}, + {"open": 11601.0, "close": 11600.0, "high": 11600.0, "low": 11600.0}, + {"open": 11602.0, "close": 11600.0, "high": 11600.0, "low": 11600.0}, + {"open": 11603.0, "close": 11600.0, "high": 11600.0, "low": 11600.0} + ]`) + var win KLineWindow + err := json.Unmarshal(jsonWin, &win) + assert.NoError(t, err) win.Truncate(5) assert.Len(t, win, 4) - assert.Equal(t, 11603.0, win.Last().Open) + assert.Equal(t, 11603.0, win.Last().Open.Float64()) win.Truncate(3) assert.Len(t, win, 3) - assert.Equal(t, 11603.0, win.Last().Open) + assert.Equal(t, 11603.0, win.Last().Open.Float64()) win.Truncate(1) assert.Len(t, win, 1) - assert.Equal(t, 11603.0, win.Last().Open) + assert.Equal(t, 11603.0, win.Last().Open.Float64()) } diff --git a/pkg/types/margin.go b/pkg/types/margin.go index b14d867dc3..fbec9e2a0f 100644 
--- a/pkg/types/margin.go +++ b/pkg/types/margin.go @@ -1,10 +1,116 @@ package types +import ( + "context" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type FuturesExchange interface { + UseFutures() + UseIsolatedFutures(symbol string) + GetFuturesSettings() FuturesSettings +} + +type FuturesSettings struct { + IsFutures bool + IsIsolatedFutures bool + IsolatedFuturesSymbol string +} + +func (s FuturesSettings) GetFuturesSettings() FuturesSettings { + return s +} + +func (s *FuturesSettings) UseFutures() { + s.IsFutures = true +} + +func (s *FuturesSettings) UseIsolatedFutures(symbol string) { + s.IsFutures = true + s.IsIsolatedFutures = true + s.IsolatedFuturesSymbol = symbol +} + +// FuturesUserAsset define cross/isolated futures account asset +type FuturesUserAsset struct { + Asset string `json:"asset"` + InitialMargin fixedpoint.Value `json:"initialMargin"` + MaintMargin fixedpoint.Value `json:"maintMargin"` + MarginBalance fixedpoint.Value `json:"marginBalance"` + MaxWithdrawAmount fixedpoint.Value `json:"maxWithdrawAmount"` + OpenOrderInitialMargin fixedpoint.Value `json:"openOrderInitialMargin"` + PositionInitialMargin fixedpoint.Value `json:"positionInitialMargin"` + UnrealizedProfit fixedpoint.Value `json:"unrealizedProfit"` + WalletBalance fixedpoint.Value `json:"walletBalance"` +} + type MarginExchange interface { UseMargin() UseIsolatedMargin(symbol string) GetMarginSettings() MarginSettings - // QueryMarginAccount(ctx context.Context) (*binance.MarginAccount, error) +} + +// MarginBorrowRepayService provides repay and borrow actions of an crypto exchange +type MarginBorrowRepayService interface { + RepayMarginAsset(ctx context.Context, asset string, amount fixedpoint.Value) error + BorrowMarginAsset(ctx context.Context, asset string, amount fixedpoint.Value) error + QueryMarginAssetMaxBorrowable(ctx context.Context, asset string) (amount fixedpoint.Value, err error) +} + +type MarginInterest struct { + GID uint64 `json:"gid" db:"gid"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + Asset string `json:"asset" db:"asset"` + Principle fixedpoint.Value `json:"principle" db:"principle"` + Interest fixedpoint.Value `json:"interest" db:"interest"` + InterestRate fixedpoint.Value `json:"interestRate" db:"interest_rate"` + IsolatedSymbol string `json:"isolatedSymbol" db:"isolated_symbol"` + Time Time `json:"time" db:"time"` +} + +type MarginLoan struct { + GID uint64 `json:"gid" db:"gid"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + TransactionID uint64 `json:"transactionID" db:"transaction_id"` + Asset string `json:"asset" db:"asset"` + Principle fixedpoint.Value `json:"principle" db:"principle"` + Time Time `json:"time" db:"time"` + IsolatedSymbol string `json:"isolatedSymbol" db:"isolated_symbol"` +} + +type MarginRepay struct { + GID uint64 `json:"gid" db:"gid"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + TransactionID uint64 `json:"transactionID" db:"transaction_id"` + Asset string `json:"asset" db:"asset"` + Principle fixedpoint.Value `json:"principle" db:"principle"` + Time Time `json:"time" db:"time"` + IsolatedSymbol string `json:"isolatedSymbol" db:"isolated_symbol"` +} + +type MarginLiquidation struct { + GID uint64 `json:"gid" db:"gid"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + AveragePrice fixedpoint.Value `json:"averagePrice" db:"average_price"` + ExecutedQuantity fixedpoint.Value `json:"executedQuantity" db:"executed_quantity"` + OrderID uint64 `json:"orderID" db:"order_id"` + Price fixedpoint.Value 
`json:"price" db:"price"` + Quantity fixedpoint.Value `json:"quantity" db:"quantity"` + Side SideType `json:"side" db:"side"` + Symbol string `json:"symbol" db:"symbol"` + TimeInForce TimeInForce `json:"timeInForce" db:"time_in_force"` + IsIsolated bool `json:"isIsolated" db:"is_isolated"` + UpdatedTime Time `json:"updatedTime" db:"time"` +} + +// MarginHistory provides the service of querying loan history and repay history +type MarginHistory interface { + QueryLoanHistory(ctx context.Context, asset string, startTime, endTime *time.Time) ([]MarginLoan, error) + QueryRepayHistory(ctx context.Context, asset string, startTime, endTime *time.Time) ([]MarginRepay, error) + QueryLiquidationHistory(ctx context.Context, startTime, endTime *time.Time) ([]MarginLiquidation, error) + QueryInterestHistory(ctx context.Context, asset string, startTime, endTime *time.Time) ([]MarginInterest, error) } type MarginSettings struct { @@ -26,3 +132,66 @@ func (e *MarginSettings) UseIsolatedMargin(symbol string) { e.IsIsolatedMargin = true e.IsolatedMarginSymbol = symbol } + +// MarginAccount is for the cross margin account +type MarginAccount struct { + BorrowEnabled bool `json:"borrowEnabled"` + MarginLevel fixedpoint.Value `json:"marginLevel"` + TotalAssetOfBTC fixedpoint.Value `json:"totalAssetOfBtc"` + TotalLiabilityOfBTC fixedpoint.Value `json:"totalLiabilityOfBtc"` + TotalNetAssetOfBTC fixedpoint.Value `json:"totalNetAssetOfBtc"` + TradeEnabled bool `json:"tradeEnabled"` + TransferEnabled bool `json:"transferEnabled"` + UserAssets []MarginUserAsset `json:"userAssets"` +} + +// MarginUserAsset define user assets of margin account +type MarginUserAsset struct { + Asset string `json:"asset"` + Borrowed fixedpoint.Value `json:"borrowed"` + Free fixedpoint.Value `json:"free"` + Interest fixedpoint.Value `json:"interest"` + Locked fixedpoint.Value `json:"locked"` + NetAsset fixedpoint.Value `json:"netAsset"` +} + +// IsolatedMarginAccount defines isolated user assets of margin account +type IsolatedMarginAccount struct { + TotalAssetOfBTC fixedpoint.Value `json:"totalAssetOfBtc"` + TotalLiabilityOfBTC fixedpoint.Value `json:"totalLiabilityOfBtc"` + TotalNetAssetOfBTC fixedpoint.Value `json:"totalNetAssetOfBtc"` + Assets IsolatedMarginAssetMap `json:"assets"` +} + +// IsolatedMarginAsset defines isolated margin asset information, like margin level, liquidation price... 
etc +type IsolatedMarginAsset struct { + Symbol string `json:"symbol"` + QuoteAsset IsolatedUserAsset `json:"quoteAsset"` + BaseAsset IsolatedUserAsset `json:"baseAsset"` + + IsolatedCreated bool `json:"isolatedCreated"` + MarginLevel fixedpoint.Value `json:"marginLevel"` + MarginLevelStatus string `json:"marginLevelStatus"` + + MarginRatio fixedpoint.Value `json:"marginRatio"` + IndexPrice fixedpoint.Value `json:"indexPrice"` + LiquidatePrice fixedpoint.Value `json:"liquidatePrice"` + LiquidateRate fixedpoint.Value `json:"liquidateRate"` + + TradeEnabled bool `json:"tradeEnabled"` +} + +// IsolatedUserAsset defines isolated user assets of the margin account +type IsolatedUserAsset struct { + Asset string `json:"asset"` + Borrowed fixedpoint.Value `json:"borrowed"` + Free fixedpoint.Value `json:"free"` + Interest fixedpoint.Value `json:"interest"` + Locked fixedpoint.Value `json:"locked"` + NetAsset fixedpoint.Value `json:"netAsset"` + NetAssetOfBtc fixedpoint.Value `json:"netAssetOfBtc"` + + BorrowEnabled bool `json:"borrowEnabled"` + RepayEnabled bool `json:"repayEnabled"` + TotalAsset fixedpoint.Value `json:"totalAsset"` +} diff --git a/pkg/types/market.go b/pkg/types/market.go index 3ab7a3b983..665c286b0e 100644 --- a/pkg/types/market.go +++ b/pkg/types/market.go @@ -4,8 +4,11 @@ import ( "encoding/json" "fmt" "math" - "strconv" "time" + + "github.com/leekchan/accounting" + + "github.com/c9s/bbgo/pkg/fixedpoint" ) type Duration time.Duration @@ -47,67 +50,132 @@ func (d *Duration) UnmarshalJSON(data []byte) error { } type Market struct { - Symbol string - PricePrecision int - VolumePrecision int - QuoteCurrency string - BaseCurrency string + Symbol string `json:"symbol"` + + // LocalSymbol is used for exchange's API (exchange package internal) + LocalSymbol string `json:"localSymbol,omitempty"` + + // PricePrecision is the precision used for formatting price, 8 = 8 decimals + // can be converted from price tick step size, e.g. + // int(math.Log10(price step size)) + PricePrecision int `json:"pricePrecision"` - // The MIN_NOTIONAL filter defines the minimum notional value allowed for an order on a symbol. An order's notional value is the price * quantity - MinNotional float64 - MinAmount float64 + // VolumePrecision is the precision used for formatting quantity and volume, 8 = 8 decimals + // can be converted from step size, e.g. + // int(math.Log10(quantity step size)) + VolumePrecision int `json:"volumePrecision"` + + // QuoteCurrency is the currency name for quote, e.g. USDT in BTC/USDT, USDC in BTC/USDC + QuoteCurrency string `json:"quoteCurrency"` + + // BaseCurrency is the current name for base, e.g. BTC in BTC/USDT, ETH in ETH/USDC + BaseCurrency string `json:"baseCurrency"` + + // The MIN_NOTIONAL filter defines the minimum notional value allowed for an order on a symbol. + // An order's notional value is the price * quantity + MinNotional fixedpoint.Value `json:"minNotional,omitempty"` + MinAmount fixedpoint.Value `json:"minAmount,omitempty"` // The LOT_SIZE filter defines the quantity - MinLot float64 - MinQuantity float64 - MaxQuantity float64 + MinQuantity fixedpoint.Value `json:"minQuantity,omitempty"` + + // MaxQuantity is currently not used in the code + MaxQuantity fixedpoint.Value `json:"maxQuantity,omitempty"` + + // StepSize is the step size of quantity + // can be converted from precision, e.g. 
+ // 1.0 / math.Pow10(m.BaseUnitPrecision) + StepSize fixedpoint.Value `json:"stepSize,omitempty"` + + MinPrice fixedpoint.Value `json:"minPrice,omitempty"` + MaxPrice fixedpoint.Value `json:"maxPrice,omitempty"` + + // TickSize is the step size of price + TickSize fixedpoint.Value `json:"tickSize,omitempty"` +} + +func (m Market) IsDustQuantity(quantity, price fixedpoint.Value) bool { + return quantity.Compare(m.MinQuantity) <= 0 || quantity.Mul(price).Compare(m.MinNotional) <= 0 +} + +// TruncateQuantity uses the step size to truncate floating number, in order to avoid the rounding issue +func (m Market) TruncateQuantity(quantity fixedpoint.Value) fixedpoint.Value { + stepRound := math.Pow10(-int(math.Log10(m.StepSize.Float64()))) + return fixedpoint.NewFromFloat(math.Trunc(quantity.Float64()*stepRound) / stepRound) +} - MinPrice float64 - MaxPrice float64 - TickSize float64 +func (m Market) BaseCurrencyFormatter() *accounting.Accounting { + a := accounting.DefaultAccounting(m.BaseCurrency, m.VolumePrecision) + a.Format = "%v %s" + return a } -func (m Market) FormatPriceCurrency(val float64) string { +func (m Market) QuoteCurrencyFormatter() *accounting.Accounting { + var format, symbol string + + switch m.QuoteCurrency { + case "USDT", "USDC", "USD": + symbol = "$" + format = "%s %v" + + default: + symbol = m.QuoteCurrency + format = "%v %s" + } + + a := accounting.DefaultAccounting(symbol, m.PricePrecision) + a.Format = format + return a +} + +func (m Market) FormatPriceCurrency(val fixedpoint.Value) string { switch m.QuoteCurrency { case "USD", "USDT": - return USD.FormatMoneyFloat64(val) + return USD.FormatMoney(val) case "BTC": - return BTC.FormatMoneyFloat64(val) + return BTC.FormatMoney(val) case "BNB": - return BNB.FormatMoneyFloat64(val) + return BNB.FormatMoney(val) } return m.FormatPrice(val) } -func (m Market) FormatPrice(val float64) string { +func (m Market) FormatPrice(val fixedpoint.Value) string { // p := math.Pow10(m.PricePrecision) - prec := int(math.Abs(math.Log10(m.MinPrice))) - p := math.Pow10(prec) - val = math.Trunc(val*p) / p - return strconv.FormatFloat(val, 'f', prec, 64) + return formatPrice(val, m.TickSize) } -func (m Market) FormatQuantity(val float64) string { - prec := int(math.Abs(math.Log10(m.MinLot))) - p := math.Pow10(prec) - val = math.Trunc(val*p) / p - return strconv.FormatFloat(val, 'f', prec, 64) +func formatPrice(price fixedpoint.Value, tickSize fixedpoint.Value) string { + prec := int(math.Round(math.Abs(math.Log10(tickSize.Float64())))) + return price.FormatString(prec) } -func (m Market) FormatVolume(val float64) string { - p := math.Pow10(m.VolumePrecision) - val = math.Trunc(val*p) / p - return strconv.FormatFloat(val, 'f', m.VolumePrecision, 64) +func (m Market) FormatQuantity(val fixedpoint.Value) string { + return formatQuantity(val, m.StepSize) +} + +func formatQuantity(quantity fixedpoint.Value, lot fixedpoint.Value) string { + prec := int(math.Round(math.Abs(math.Log10(lot.Float64())))) + return quantity.FormatString(prec) } -func (m Market) CanonicalizeVolume(val float64) float64 { +func (m Market) FormatVolume(val fixedpoint.Value) string { + return val.FormatString(m.VolumePrecision) +} + +func (m Market) CanonicalizeVolume(val fixedpoint.Value) float64 { + // TODO Round p := math.Pow10(m.VolumePrecision) - return math.Trunc(p*val) / p + return math.Trunc(p*val.Float64()) / p } type MarketMap map[string]Market + +func (m MarketMap) Add(market Market) { + m[market.Symbol] = market +} diff --git a/pkg/types/market_test.go 
b/pkg/types/market_test.go index dd1225f2f0..d0544e9ba3 100644 --- a/pkg/types/market_test.go +++ b/pkg/types/market_test.go @@ -2,12 +2,39 @@ package types import ( "encoding/json" + "regexp" "testing" "time" "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" ) +var s func(string) fixedpoint.Value = fixedpoint.MustNewFromString + +func TestFormatQuantity(t *testing.T) { + quantity := formatQuantity( + s("0.12511"), + s("0.01")) + assert.Equal(t, "0.12", quantity) + + quantity = formatQuantity( + s("0.12511"), + s("0.001")) + assert.Equal(t, "0.125", quantity) +} + +func TestFormatPrice(t *testing.T) { + price := formatPrice( + s("26.288256"), + s("0.0001")) + assert.Equal(t, "26.2882", price) + + price = formatPrice(s("26.288656"), s("0.001")) + assert.Equal(t, "26.288", price) +} + func TestDurationParse(t *testing.T) { type A struct { Duration Duration `json:"duration"` @@ -28,7 +55,7 @@ func TestDurationParse(t *testing.T) { { name: "float64 to second", input: `{ "duration": 1.1 }`, - expected: Duration(time.Second + 100 * time.Millisecond), + expected: Duration(time.Second + 100*time.Millisecond), }, { name: "2m", @@ -38,7 +65,7 @@ func TestDurationParse(t *testing.T) { { name: "2m3s", input: `{ "duration": "2m3s" }`, - expected: Duration(2 * time.Minute + 3 * time.Second), + expected: Duration(2*time.Minute + 3*time.Second), }, } for _, test := range tests { @@ -50,3 +77,117 @@ func TestDurationParse(t *testing.T) { }) } } + +func Test_formatPrice(t *testing.T) { + type args struct { + price fixedpoint.Value + tickSize fixedpoint.Value + } + tests := []struct { + name string + args args + want string + }{ + { + name: "no fraction", + args: args{ + price: fixedpoint.NewFromFloat(10.0), + tickSize: fixedpoint.NewFromFloat(0.001), + }, + want: "10.000", + }, + { + name: "fraction truncate", + args: args{ + price: fixedpoint.NewFromFloat(2.334), + tickSize: fixedpoint.NewFromFloat(0.01), + }, + want: "2.33", + }, + { + name: "fraction", + args: args{ + price: fixedpoint.NewFromFloat(2.334), + tickSize: fixedpoint.NewFromFloat(0.0001), + }, + want: "2.3340", + }, + { + name: "more fraction", + args: args{ + price: fixedpoint.MustNewFromString("2.1234567898888"), + tickSize: fixedpoint.NewFromFloat(0.0001), + }, + want: "2.1234", + }, + } + + binanceFormatRE := regexp.MustCompile("^([0-9]{1,20})(.[0-9]{1,20})?$") + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := formatPrice(tt.args.price, tt.args.tickSize) + if got != tt.want { + t.Errorf("formatPrice() = %v, want %v", got, tt.want) + } + + assert.Regexp(t, binanceFormatRE, got) + }) + } +} + +func Test_formatQuantity(t *testing.T) { + type args struct { + quantity fixedpoint.Value + tickSize fixedpoint.Value + } + tests := []struct { + name string + args args + want string + }{ + { + name: "no fraction", + args: args{ + quantity: fixedpoint.NewFromFloat(10.0), + tickSize: fixedpoint.NewFromFloat(0.001), + }, + want: "10.000", + }, + { + name: "fraction truncate", + args: args{ + quantity: fixedpoint.NewFromFloat(2.334), + tickSize: fixedpoint.NewFromFloat(0.01), + }, + want: "2.33", + }, + { + name: "fraction", + args: args{ + quantity: fixedpoint.NewFromFloat(2.334), + tickSize: fixedpoint.NewFromFloat(0.0001), + }, + want: "2.3340", + }, + { + name: "more fraction", + args: args{ + quantity: fixedpoint.MustNewFromString("2.1234567898888"), + tickSize: fixedpoint.NewFromFloat(0.0001), + }, + want: "2.1234", + }, + } + + binanceFormatRE := 
regexp.MustCompile("^([0-9]{1,20})(.[0-9]{1,20})?$") + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := formatQuantity(tt.args.quantity, tt.args.tickSize) + if got != tt.want { + t.Errorf("formatQuantity() = %v, want %v", got, tt.want) + } + + assert.Regexp(t, binanceFormatRE, got) + }) + } +} diff --git a/pkg/types/order.go b/pkg/types/order.go index 3e4e042f2e..f560e165a6 100644 --- a/pkg/types/order.go +++ b/pkg/types/order.go @@ -1,11 +1,16 @@ package types import ( + "encoding/json" "fmt" + "strconv" + "strings" "time" + "github.com/pkg/errors" "github.com/slack-go/slack" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/util" ) @@ -15,6 +20,14 @@ func init() { _ = PlainText(&Order{}) } +type TimeInForce string + +var ( + TimeInForceGTC TimeInForce = "GTC" + TimeInForceIOC TimeInForce = "IOC" + TimeInForceFOK TimeInForce = "FOK" +) + // MarginOrderSideEffectType define side effect type for orders type MarginOrderSideEffectType string @@ -24,11 +37,38 @@ var ( SideEffectTypeAutoRepay MarginOrderSideEffectType = "AUTO_REPAY" ) +func (t *MarginOrderSideEffectType) UnmarshalJSON(data []byte) error { + var s string + var err = json.Unmarshal(data, &s) + if err != nil { + return errors.Wrapf(err, "unable to unmarshal side effect type: %s", data) + } + + switch strings.ToUpper(s) { + + case string(SideEffectTypeNoSideEffect), "": + *t = SideEffectTypeNoSideEffect + return nil + + case string(SideEffectTypeMarginBuy), "BORROW", "MARGINBUY": + *t = SideEffectTypeMarginBuy + return nil + + case string(SideEffectTypeAutoRepay), "REPAY", "AUTOREPAY": + *t = SideEffectTypeAutoRepay + return nil + + } + + return fmt.Errorf("invalid side effect type: %s", data) +} + // OrderType define order type type OrderType string const ( OrderTypeLimit OrderType = "LIMIT" + OrderTypeLimitMaker OrderType = "LIMIT_MAKER" OrderTypeMarket OrderType = "MARKET" OrderTypeStopLimit OrderType = "STOP_LIMIT" OrderTypeStopMarket OrderType = "STOP_MARKET" @@ -48,14 +88,25 @@ func (t *OrderType) Scan(v interface{}) error { } */ +const NoClientOrderID = "0" + type OrderStatus string const ( - OrderStatusNew OrderStatus = "NEW" - OrderStatusFilled OrderStatus = "FILLED" + // OrderStatusNew means the order is active on the orderbook without any filling. + OrderStatusNew OrderStatus = "NEW" + + // OrderStatusFilled means the order is fully-filled, it's an end state. + OrderStatusFilled OrderStatus = "FILLED" + + // OrderStatusPartiallyFilled means the order is partially-filled, it's an end state, the order might be canceled in the end. OrderStatusPartiallyFilled OrderStatus = "PARTIALLY_FILLED" - OrderStatusCanceled OrderStatus = "CANCELED" - OrderStatusRejected OrderStatus = "REJECTED" + + // OrderStatusCanceled means the order is canceled without partially filled or filled. 
+ OrderStatusCanceled OrderStatus = "CANCELED" + + // OrderStatusRejected means the order is not placed successfully, it's rejected by the api + OrderStatusRejected OrderStatus = "REJECTED" ) type SubmitOrder struct { @@ -65,41 +116,72 @@ type SubmitOrder struct { Side SideType `json:"side" db:"side"` Type OrderType `json:"orderType" db:"order_type"` - Quantity float64 `json:"quantity" db:"quantity"` - Price float64 `json:"price" db:"price"` - StopPrice float64 `json:"stopPrice" db:"stop_price"` + Quantity fixedpoint.Value `json:"quantity" db:"quantity"` + Price fixedpoint.Value `json:"price" db:"price"` + StopPrice fixedpoint.Value `json:"stopPrice,omitempty" db:"stop_price"` - Market Market `json:"market" db:"-"` + Market Market `json:"-" db:"-"` - // TODO: we can probably remove these field - StopPriceString string `json:"-"` - PriceString string `json:"-"` - QuantityString string `json:"-"` + TimeInForce TimeInForce `json:"timeInForce,omitempty" db:"time_in_force"` // GTC, IOC, FOK - TimeInForce string `json:"timeInForce" db:"time_in_force"` // GTC, IOC, FOK + GroupID uint32 `json:"groupID,omitempty"` - GroupID int64 `json:"groupID"` + MarginSideEffect MarginOrderSideEffectType `json:"marginSideEffect,omitempty"` // AUTO_REPAY = repay, MARGIN_BUY = borrow, defaults to NO_SIDE_EFFECT - MarginSideEffect MarginOrderSideEffectType `json:"marginSideEffect"` // AUTO_REPAY = repay, MARGIN_BUY = borrow, defaults to NO_SIDE_EFFECT + // futures order fields + IsFutures bool `json:"is_futures" db:"is_futures"` + ReduceOnly bool `json:"reduceOnly" db:"reduce_only"` + ClosePosition bool `json:"closePosition" db:"close_position"` } -func (o *SubmitOrder) String() string { - return fmt.Sprintf("SubmitOrder %s %s %s %f @ %f", o.Symbol, o.Type, o.Side, o.Quantity, o.Price) +func (o SubmitOrder) String() string { + switch o.Type { + case OrderTypeMarket: + return fmt.Sprintf("SubmitOrder %s %s %s %s", o.Symbol, o.Type, o.Side, o.Quantity.String()) + } + + return fmt.Sprintf("SubmitOrder %s %s %s %s @ %s", o.Symbol, o.Type, o.Side, o.Quantity.String(), o.Price.String()) } -func (o *SubmitOrder) PlainText() string { - return fmt.Sprintf("SubmitOrder %s %s %s %f @ %f", o.Symbol, o.Type, o.Side, o.Quantity, o.Price) +func (o SubmitOrder) PlainText() string { + switch o.Type { + case OrderTypeMarket: + return fmt.Sprintf("SubmitOrder %s %s %s %s", o.Symbol, o.Type, o.Side, o.Quantity.String()) + } + + return fmt.Sprintf("SubmitOrder %s %s %s %s @ %s", o.Symbol, o.Type, o.Side, o.Quantity.String(), o.Price.String()) } -func (o *SubmitOrder) SlackAttachment() slack.Attachment { +func (o SubmitOrder) SlackAttachment() slack.Attachment { var fields = []slack.AttachmentField{ {Title: "Symbol", Value: o.Symbol, Short: true}, {Title: "Side", Value: string(o.Side), Short: true}, - {Title: "Volume", Value: o.QuantityString, Short: true}, + {Title: "Price", Value: o.Price.String(), Short: true}, + {Title: "Quantity", Value: o.Quantity.String(), Short: true}, + } + + if o.Price.Sign() > 0 && o.Quantity.Sign() > 0 && len(o.Market.QuoteCurrency) > 0 { + if IsFiatCurrency(o.Market.QuoteCurrency) { + fields = append(fields, slack.AttachmentField{ + Title: "Amount", + Value: USD.FormatMoney(o.Price.Mul(o.Quantity)), + Short: true, + }) + } else { + fields = append(fields, slack.AttachmentField{ + Title: "Amount", + Value: fmt.Sprintf("%s %s", o.Price.Mul(o.Quantity).String(), o.Market.QuoteCurrency), + Short: true, + }) + } } - if len(o.PriceString) > 0 { - fields = append(fields, slack.AttachmentField{Title: "Price", 
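Since SubmitOrder.Quantity, Price and StopPrice are now fixedpoint.Value instead of float64 (and the old *String helper fields are gone), callers construct orders with fixedpoint constructors. A small sketch, using made-up symbol and prices, assuming the standard bbgo import paths:

    package main

    import (
        "fmt"

        "github.com/c9s/bbgo/pkg/fixedpoint"
        "github.com/c9s/bbgo/pkg/types"
    )

    func main() {
        // Quantity and Price are fixedpoint.Value now, so build them explicitly
        // instead of assigning float64 literals.
        order := types.SubmitOrder{
            Symbol:      "BTCUSDT",
            Side:        types.SideTypeBuy,
            Type:        types.OrderTypeLimit,
            Quantity:    fixedpoint.MustNewFromString("0.01"),
            Price:       fixedpoint.NewFromFloat(28000.0),
            TimeInForce: types.TimeInForceGTC,
        }

        // String() renders both values via fixedpoint's String(), roughly:
        // "SubmitOrder BTCUSDT LIMIT BUY 0.01 @ 28000"
        fmt.Println(order.String())
    }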
Value: o.PriceString, Short: true}) + if len(o.ClientOrderID) > 0 { + fields = append(fields, slack.AttachmentField{Title: "ClientOrderID", Value: o.ClientOrderID, Short: true}) + } + + if len(o.MarginSideEffect) > 0 { + fields = append(fields, slack.AttachmentField{Title: "MarginSideEffect", Value: string(o.MarginSideEffect), Short: true}) } return slack.Attachment{ @@ -110,33 +192,153 @@ func (o *SubmitOrder) SlackAttachment() slack.Attachment { } } +type OrderQuery struct { + Symbol string + OrderID string + ClientOrderID string +} + type Order struct { SubmitOrder - Exchange string `json:"exchange" db:"exchange"` - GID uint64 `json:"gid" db:"gid"` - OrderID uint64 `json:"orderID" db:"order_id"` // order id - Status OrderStatus `json:"status" db:"status"` - ExecutedQuantity float64 `json:"executedQuantity" db:"executed_quantity"` - IsWorking bool `json:"isWorking" db:"is_working"` - CreationTime time.Time `json:"creationTime" db:"created_at"` - UpdateTime time.Time `json:"updateTime" db:"updated_at"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + + // GID is used for relational database storage, it's an incremental ID + GID uint64 `json:"gid" db:"gid"` + OrderID uint64 `json:"orderID" db:"order_id"` // order id + UUID string `json:"uuid,omitempty"` + + Status OrderStatus `json:"status" db:"status"` + ExecutedQuantity fixedpoint.Value `json:"executedQuantity" db:"executed_quantity"` + IsWorking bool `json:"isWorking" db:"is_working"` + CreationTime Time `json:"creationTime" db:"created_at"` + UpdateTime Time `json:"updateTime" db:"updated_at"` IsMargin bool `json:"isMargin" db:"is_margin"` IsIsolated bool `json:"isIsolated" db:"is_isolated"` } +func (o Order) CsvHeader() []string { + return []string{ + "order_id", + "symbol", + "side", + "order_type", + "status", + "price", + "quantity", + "creation_time", + "update_time", + } +} + +func (o Order) CsvRecords() [][]string { + return [][]string{ + { + strconv.FormatUint(o.OrderID, 10), + o.Symbol, + string(o.Side), + string(o.Type), + string(o.Status), + o.Price.String(), + o.Quantity.String(), + o.CreationTime.Time().UTC().Format(time.RFC1123), + o.UpdateTime.Time().UTC().Format(time.RFC1123), + }, + } +} + +// Backup backs up the current order quantity to a SubmitOrder object +// so that we can post the order later when we want to restore the orders. 
+func (o Order) Backup() SubmitOrder { + so := o.SubmitOrder + so.Quantity = o.Quantity.Sub(o.ExecutedQuantity) + + // ClientOrderID can not be reused + so.ClientOrderID = "" + return so +} + func (o Order) String() string { - return fmt.Sprintf("order %s %f/%f at %f -> %s", o.Side, o.ExecutedQuantity, o.Quantity, o.Price, o.Status) + var orderID string + if o.UUID != "" { + orderID = fmt.Sprintf("UUID %s (%d)", o.UUID, o.OrderID) + } else { + orderID = strconv.FormatUint(o.OrderID, 10) + } + + return fmt.Sprintf("ORDER %s | %s | %s | %s | %s %-4s | %s/%s @ %s | %s", + o.Exchange.String(), + o.CreationTime.Time().Local().Format(time.RFC1123), + orderID, + o.Symbol, + o.Type, + o.Side, + o.ExecutedQuantity.String(), + o.Quantity.String(), + o.Price.String(), + o.Status) } +// PlainText is used for telegram-styled messages func (o Order) PlainText() string { - return fmt.Sprintf("%s %s Order %s %s price %s, quantity %s/%s status %s", - o.Exchange, - o.Type, + return fmt.Sprintf("Order %s %s %s %s @ %s %s/%s -> %s", + o.Exchange.String(), o.Symbol, + o.Type, o.Side, - util.FormatFloat(o.Price, 2), - util.FormatFloat(o.ExecutedQuantity, 2), - util.FormatFloat(o.Quantity, 4), o.Status) + o.Price.FormatString(2), + o.ExecutedQuantity.FormatString(2), + o.Quantity.FormatString(4), + o.Status) +} + +func (o Order) SlackAttachment() slack.Attachment { + var fields = []slack.AttachmentField{ + {Title: "Symbol", Value: o.Symbol, Short: true}, + {Title: "Side", Value: string(o.Side), Short: true}, + {Title: "Price", Value: o.Price.String(), Short: true}, + { + Title: "Executed Quantity", + Value: o.ExecutedQuantity.String() + "/" + o.Quantity.String(), + Short: true, + }, + } + + fields = append(fields, slack.AttachmentField{ + Title: "ID", + Value: strconv.FormatUint(o.OrderID, 10), + Short: true, + }) + + orderStatusIcon := "" + + switch o.Status { + case OrderStatusNew: + orderStatusIcon = ":new:" + case OrderStatusCanceled: + orderStatusIcon = ":eject:" + case OrderStatusPartiallyFilled: + orderStatusIcon = ":arrow_forward:" + case OrderStatusFilled: + orderStatusIcon = ":white_check_mark:" + + } + + fields = append(fields, slack.AttachmentField{ + Title: "Status", + Value: string(o.Status) + " " + orderStatusIcon, + Short: true, + }) + + footerIcon := ExchangeFooterIcon(o.Exchange) + + return slack.Attachment{ + Color: SideToColorName(o.Side), + Title: string(o.Type) + " Order " + string(o.Side), + // Text: "", + Fields: fields, + FooterIcon: footerIcon, + Footer: strings.ToLower(o.Exchange.String()) + util.Render(" creation time {{ . 
}}", o.CreationTime.Time().Format(time.StampMilli)), + } } diff --git a/pkg/types/orderbook.go b/pkg/types/orderbook.go index 66c1c05b98..8bae186c80 100644 --- a/pkg/types/orderbook.go +++ b/pkg/types/orderbook.go @@ -1,264 +1,117 @@ package types import ( - "fmt" - "sort" + "os" + "strconv" "sync" + "time" "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/sigchan" ) -type PriceVolume struct { - Price fixedpoint.Value - Volume fixedpoint.Value +type OrderBook interface { + Spread() (fixedpoint.Value, bool) + BestAsk() (PriceVolume, bool) + BestBid() (PriceVolume, bool) + LastUpdateTime() time.Time + Reset() + Load(book SliceOrderBook) + Update(book SliceOrderBook) + Copy() OrderBook + SideBook(sideType SideType) PriceVolumeSlice + CopyDepth(depth int) OrderBook + IsValid() (bool, error) } -func (p PriceVolume) String() string { - return fmt.Sprintf("PriceVolume{ price: %f, volume: %f }", p.Price.Float64(), p.Volume.Float64()) -} - -type PriceVolumeSlice []PriceVolume - -func (slice PriceVolumeSlice) Len() int { return len(slice) } -func (slice PriceVolumeSlice) Less(i, j int) bool { return slice[i].Price < slice[j].Price } -func (slice PriceVolumeSlice) Swap(i, j int) { slice[i], slice[j] = slice[j], slice[i] } - -// Trim removes the pairs that volume = 0 -func (slice PriceVolumeSlice) Trim() (pvs PriceVolumeSlice) { - for _, pv := range slice { - if pv.Volume > 0 { - pvs = append(pvs, pv) - } - } - - return pvs -} +type MutexOrderBook struct { + sync.Mutex -func (slice PriceVolumeSlice) Copy() PriceVolumeSlice { - // this is faster than make (however it's only for simple types) - return append(slice[:0:0], slice...) + Symbol string + OrderBook OrderBook } -func (slice PriceVolumeSlice) First() (PriceVolume, bool) { - if len(slice) > 0 { - return slice[0], true - } - return PriceVolume{}, false -} +func NewMutexOrderBook(symbol string) *MutexOrderBook { + var book OrderBook = NewSliceOrderBook(symbol) -func (slice PriceVolumeSlice) IndexByVolumeDepth(requiredVolume fixedpoint.Value) int { - var tv int64 = 0 - for x, el := range slice { - tv += el.Volume.Int64() - if tv >= requiredVolume.Int64() { - return x - } + if v, _ := strconv.ParseBool(os.Getenv("ENABLE_RBT_ORDERBOOK")); v { + book = NewRBOrderBook(symbol) } - // not deep enough - return -1 -} - -func (slice PriceVolumeSlice) InsertAt(idx int, pv PriceVolume) PriceVolumeSlice { - rear := append([]PriceVolume{}, slice[idx:]...) - newSlice := append(slice[:idx], pv) - return append(newSlice, rear...) -} - -func (slice PriceVolumeSlice) Remove(price fixedpoint.Value, descending bool) PriceVolumeSlice { - matched, idx := slice.Find(price, descending) - if matched.Price != price { - return slice + return &MutexOrderBook{ + Symbol: symbol, + OrderBook: book, } - - return append(slice[:idx], slice[idx+1:]...) } -// FindPriceVolumePair finds the pair by the given price, this function is a read-only -// operation, so we use the value receiver to avoid copy value from the pointer -// If the price is not found, it will return the index where the price can be inserted at. 
-// true for descending (bid orders), false for ascending (ask orders) -func (slice PriceVolumeSlice) Find(price fixedpoint.Value, descending bool) (pv PriceVolume, idx int) { - idx = sort.Search(len(slice), func(i int) bool { - if descending { - return slice[i].Price <= price - } - return slice[i].Price >= price - }) - - if idx >= len(slice) || slice[idx].Price != price { - return pv, idx - } - - pv = slice[idx] - - return pv, idx +func (b *MutexOrderBook) IsValid() (ok bool, err error) { + b.Lock() + ok, err = b.OrderBook.IsValid() + b.Unlock() + return ok, err } -func (slice PriceVolumeSlice) Upsert(pv PriceVolume, descending bool) PriceVolumeSlice { - if len(slice) == 0 { - return append(slice, pv) - } - - price := pv.Price - _, idx := slice.Find(price, descending) - if idx >= len(slice) || slice[idx].Price != price { - return slice.InsertAt(idx, pv) - } - - slice[idx].Volume = pv.Volume - return slice +func (b *MutexOrderBook) LastUpdateTime() time.Time { + b.Lock() + t := b.OrderBook.LastUpdateTime() + b.Unlock() + return t } -//go:generate callbackgen -type OrderBook -type OrderBook struct { - Symbol string - Bids PriceVolumeSlice - Asks PriceVolumeSlice - - loadCallbacks []func(book *OrderBook) - updateCallbacks []func(book *OrderBook) - bidsChangeCallbacks []func(pvs PriceVolumeSlice) - asksChangeCallbacks []func(pvs PriceVolumeSlice) +func (b *MutexOrderBook) BestBidAndAsk() (bid, ask PriceVolume, ok bool) { + var ok1, ok2 bool + b.Lock() + bid, ok1 = b.OrderBook.BestBid() + ask, ok2 = b.OrderBook.BestAsk() + b.Unlock() + ok = ok1 && ok2 + return bid, ask, ok } -func (b *OrderBook) BestBid() (PriceVolume, bool) { - if len(b.Bids) == 0 { - return PriceVolume{}, false - } - - return b.Bids[0], true +func (b *MutexOrderBook) BestBid() (pv PriceVolume, ok bool) { + b.Lock() + pv, ok = b.OrderBook.BestBid() + b.Unlock() + return pv, ok } -func (b *OrderBook) BestAsk() (PriceVolume, bool) { - if len(b.Asks) == 0 { - return PriceVolume{}, false - } - - return b.Asks[0], true +func (b *MutexOrderBook) BestAsk() (pv PriceVolume, ok bool) { + b.Lock() + pv, ok = b.OrderBook.BestAsk() + b.Unlock() + return pv, ok } -func (b *OrderBook) IsValid() bool { - bid, hasBid := b.BestBid() - ask, hasAsk := b.BestAsk() - - if !hasBid || !hasAsk { - return false - } - - return bid.Price < ask.Price +func (b *MutexOrderBook) Load(book SliceOrderBook) { + b.Lock() + b.OrderBook.Load(book) + b.Unlock() } -func (b *OrderBook) PriceVolumesBySide(side SideType) PriceVolumeSlice { - switch side { - - case SideTypeBuy: - return b.Bids - - case SideTypeSell: - return b.Asks - } - - return nil +func (b *MutexOrderBook) Reset() { + b.Lock() + b.OrderBook.Reset() + b.Unlock() } -func (b *OrderBook) Copy() (book OrderBook) { - book = *b - book.Bids = b.Bids.Copy() - book.Asks = b.Asks.Copy() +func (b *MutexOrderBook) CopyDepth(depth int) OrderBook { + b.Lock() + book := b.OrderBook.CopyDepth(depth) + b.Unlock() return book } -func (b *OrderBook) updateAsks(pvs PriceVolumeSlice) { - for _, pv := range pvs { - if pv.Volume == 0 { - b.Asks = b.Asks.Remove(pv.Price, false) - } else { - b.Asks = b.Asks.Upsert(pv, false) - } - } - - b.EmitAsksChange(b.Asks) -} - -func (b *OrderBook) updateBids(pvs PriceVolumeSlice) { - for _, pv := range pvs { - if pv.Volume == 0 { - b.Bids = b.Bids.Remove(pv.Price, true) - } else { - b.Bids = b.Bids.Upsert(pv, true) - } - } - - b.EmitBidsChange(b.Bids) -} - -func (b *OrderBook) update(book OrderBook) { - b.updateBids(book.Bids) - b.updateAsks(book.Asks) -} - -func (b *OrderBook) 
Reset() { - b.Bids = nil - b.Asks = nil -} - -func (b *OrderBook) Load(book OrderBook) { - b.Reset() - b.update(book) - b.EmitLoad(b) -} - -func (b *OrderBook) Update(book OrderBook) { - b.update(book) - b.EmitUpdate(b) -} - -func (b *OrderBook) Print() { - fmt.Printf("BOOK %s\n", b.Symbol) - fmt.Printf("ASKS:\n") - for i := len(b.Asks) - 1; i >= 0; i-- { - fmt.Printf("- ASK: %s\n", b.Asks[i].String()) - } - - fmt.Printf("BIDS:\n") - for _, bid := range b.Bids { - fmt.Printf("- BID: %s\n", bid.String()) - } -} - -type MutexOrderBook struct { - sync.Mutex - - *OrderBook -} - -func NewMutexOrderBook(symbol string) *MutexOrderBook { - return &MutexOrderBook{ - OrderBook: &OrderBook{Symbol: symbol}, - } -} - -func (b *MutexOrderBook) Load(book OrderBook) { +func (b *MutexOrderBook) Copy() OrderBook { b.Lock() - defer b.Unlock() - - b.Reset() - b.update(book) - b.EmitLoad(b.OrderBook) -} - -func (b *MutexOrderBook) Get() OrderBook { - return b.OrderBook.Copy() + book := b.OrderBook.Copy() + b.Unlock() + return book } -func (b *MutexOrderBook) Update(book OrderBook) { +func (b *MutexOrderBook) Update(update SliceOrderBook) { b.Lock() - defer b.Unlock() - - b.update(book) - b.EmitUpdate(b.OrderBook) + b.OrderBook.Update(update) + b.Unlock() } // StreamOrderBook receives streaming data from websocket connection and @@ -277,8 +130,8 @@ func NewStreamBook(symbol string) *StreamOrderBook { } func (sb *StreamOrderBook) BindStream(stream Stream) { - stream.OnBookSnapshot(func(book OrderBook) { - if sb.Symbol != book.Symbol { + stream.OnBookSnapshot(func(book SliceOrderBook) { + if sb.MutexOrderBook.Symbol != book.Symbol { return } @@ -286,8 +139,8 @@ func (sb *StreamOrderBook) BindStream(stream Stream) { sb.C.Emit() }) - stream.OnBookUpdate(func(book OrderBook) { - if sb.Symbol != book.Symbol { + stream.OnBookUpdate(func(book SliceOrderBook) { + if sb.MutexOrderBook.Symbol != book.Symbol { return } diff --git a/pkg/types/orderbook_callbacks.go b/pkg/types/orderbook_callbacks.go deleted file mode 100644 index 292c064193..0000000000 --- a/pkg/types/orderbook_callbacks.go +++ /dev/null @@ -1,43 +0,0 @@ -// Code generated by "callbackgen -type OrderBook"; DO NOT EDIT. 
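With the refactor above, MutexOrderBook wraps the OrderBook interface and picks the red-black tree backend when ENABLE_RBT_ORDERBOOK is set, falling back to the slice-based book otherwise. A rough usage sketch with made-up price levels, assuming the standard bbgo import paths:

    package main

    import (
        "fmt"

        "github.com/c9s/bbgo/pkg/fixedpoint"
        "github.com/c9s/bbgo/pkg/types"
    )

    func main() {
        // Setting ENABLE_RBT_ORDERBOOK=1 in the environment before this call
        // would select the RB-tree backend instead of the slice-based one.
        book := types.NewMutexOrderBook("BTCUSDT")

        snapshot := types.SliceOrderBook{
            Symbol: "BTCUSDT",
            Bids: types.PriceVolumeSlice{
                {Price: fixedpoint.NewFromFloat(27990.0), Volume: fixedpoint.One},
            },
            Asks: types.PriceVolumeSlice{
                {Price: fixedpoint.NewFromFloat(28010.0), Volume: fixedpoint.One},
            },
        }
        book.Load(snapshot)

        // All reads go through the mutex-guarded wrapper.
        if bid, ask, ok := book.BestBidAndAsk(); ok {
            fmt.Println("best bid:", bid.Price.String(), "best ask:", ask.Price.String())
        }
    }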
- -package types - -func (b *OrderBook) OnLoad(cb func(book *OrderBook)) { - b.loadCallbacks = append(b.loadCallbacks, cb) -} - -func (b *OrderBook) EmitLoad(book *OrderBook) { - for _, cb := range b.loadCallbacks { - cb(book) - } -} - -func (b *OrderBook) OnUpdate(cb func(book *OrderBook)) { - b.updateCallbacks = append(b.updateCallbacks, cb) -} - -func (b *OrderBook) EmitUpdate(book *OrderBook) { - for _, cb := range b.updateCallbacks { - cb(book) - } -} - -func (b *OrderBook) OnBidsChange(cb func(pvs PriceVolumeSlice)) { - b.bidsChangeCallbacks = append(b.bidsChangeCallbacks, cb) -} - -func (b *OrderBook) EmitBidsChange(pvs PriceVolumeSlice) { - for _, cb := range b.bidsChangeCallbacks { - cb(pvs) - } -} - -func (b *OrderBook) OnAsksChange(cb func(pvs PriceVolumeSlice)) { - b.asksChangeCallbacks = append(b.asksChangeCallbacks, cb) -} - -func (b *OrderBook) EmitAsksChange(pvs PriceVolumeSlice) { - for _, cb := range b.asksChangeCallbacks { - cb(pvs) - } -} diff --git a/pkg/types/orderbook_test.go b/pkg/types/orderbook_test.go new file mode 100644 index 0000000000..32e7d31b4e --- /dev/null +++ b/pkg/types/orderbook_test.go @@ -0,0 +1,138 @@ +package types + +import ( + "math/rand" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +func prepareOrderBookBenchmarkData() (asks, bids PriceVolumeSlice) { + for p := 0.0; p < 1000.0; p++ { + asks = append(asks, PriceVolume{fixedpoint.NewFromFloat(1000 + p), fixedpoint.One}) + bids = append(bids, PriceVolume{fixedpoint.NewFromFloat(1000 - 0.1 - p), fixedpoint.One}) + } + return +} + +func BenchmarkOrderBook_Load(b *testing.B) { + var asks, bids = prepareOrderBookBenchmarkData() + for p := 0.0; p < 1000.0; p++ { + asks = append(asks, PriceVolume{fixedpoint.NewFromFloat(1000 + p), fixedpoint.One}) + bids = append(bids, PriceVolume{fixedpoint.NewFromFloat(1000 - 0.1 - p), fixedpoint.One}) + } + + b.Run("RBTOrderBook", func(b *testing.B) { + book := NewRBOrderBook("ETHUSDT") + for i := 0; i < b.N; i++ { + for _, ask := range asks { + book.Asks.Upsert(ask.Price, ask.Volume) + } + for _, bid := range bids { + book.Bids.Upsert(bid.Price, bid.Volume) + } + } + }) + + b.Run("OrderBook", func(b *testing.B) { + book := &SliceOrderBook{} + for i := 0; i < b.N; i++ { + for _, ask := range asks { + book.Asks = book.Asks.Upsert(ask, false) + } + for _, bid := range bids { + book.Bids = book.Bids.Upsert(bid, true) + } + } + }) +} + +func BenchmarkOrderBook_UpdateAndInsert(b *testing.B) { + var asks, bids = prepareOrderBookBenchmarkData() + for p := 0.0; p < 1000.0; p += 2 { + asks = append(asks, PriceVolume{fixedpoint.NewFromFloat(1000 + p), fixedpoint.One}) + bids = append(bids, PriceVolume{fixedpoint.NewFromFloat(1000 - 0.1 - p), fixedpoint.One}) + } + + rbBook := NewRBOrderBook("ETHUSDT") + for _, ask := range asks { + rbBook.Asks.Upsert(ask.Price, ask.Volume) + } + for _, bid := range bids { + rbBook.Bids.Upsert(bid.Price, bid.Volume) + } + + b.Run("RBTOrderBook", func(b *testing.B) { + for i := 0; i < b.N; i++ { + var price = fixedpoint.NewFromFloat(rand.Float64() * 2000.0) + if price.Compare(fixedpoint.NewFromInt(1000)) >= 0 { + rbBook.Asks.Upsert(price, fixedpoint.One) + } else { + rbBook.Bids.Upsert(price, fixedpoint.One) + } + } + }) + + sliceBook := &SliceOrderBook{} + for i := 0; i < b.N; i++ { + for _, ask := range asks { + sliceBook.Asks = sliceBook.Asks.Upsert(ask, false) + } + for _, bid := range bids { + sliceBook.Bids = sliceBook.Bids.Upsert(bid, true) + } + } + b.Run("OrderBook", func(b 
*testing.B) { + for i := 0; i < b.N; i++ { + var price = fixedpoint.NewFromFloat(rand.Float64() * 2000.0) + if price.Compare(fixedpoint.NewFromFloat(1000)) >= 0 { + sliceBook.Asks = sliceBook.Asks.Upsert(PriceVolume{Price: price, Volume: fixedpoint.NewFromFloat(1)}, false) + } else { + sliceBook.Bids = sliceBook.Bids.Upsert(PriceVolume{Price: price, Volume: fixedpoint.NewFromFloat(1)}, true) + } + } + }) +} + +func TestOrderBook_IsValid(t *testing.T) { + ob := SliceOrderBook{ + Bids: PriceVolumeSlice{ + {fixedpoint.NewFromFloat(100.0), fixedpoint.NewFromFloat(1.5)}, + {fixedpoint.NewFromFloat(90.0), fixedpoint.NewFromFloat(2.5)}, + }, + + Asks: PriceVolumeSlice{ + {fixedpoint.NewFromFloat(110.0), fixedpoint.NewFromFloat(1.5)}, + {fixedpoint.NewFromFloat(120.0), fixedpoint.NewFromFloat(2.5)}, + }, + } + + isValid, err := ob.IsValid() + assert.True(t, isValid) + assert.NoError(t, err) + + ob.Bids = nil + isValid, err = ob.IsValid() + assert.False(t, isValid) + assert.EqualError(t, err, "empty bids") + + ob.Bids = PriceVolumeSlice{ + {fixedpoint.NewFromFloat(80000.0), fixedpoint.NewFromFloat(1.5)}, + {fixedpoint.NewFromFloat(120.0), fixedpoint.NewFromFloat(2.5)}, + } + + ob.Asks = nil + isValid, err = ob.IsValid() + assert.False(t, isValid) + assert.EqualError(t, err, "empty asks") + + ob.Asks = PriceVolumeSlice{ + {fixedpoint.NewFromFloat(100.0), fixedpoint.NewFromFloat(1.5)}, + {fixedpoint.NewFromFloat(90.0), fixedpoint.NewFromFloat(2.5)}, + } + isValid, err = ob.IsValid() + assert.False(t, isValid) + assert.EqualError(t, err, "bid price 80000 > ask price 100") +} diff --git a/pkg/types/ordermap.go b/pkg/types/ordermap.go index d021f2652f..651f3d8c7b 100644 --- a/pkg/types/ordermap.go +++ b/pkg/types/ordermap.go @@ -1,10 +1,21 @@ package types -import "sync" +import ( + "sync" + "time" +) // OrderMap is used for storing orders by their order id type OrderMap map[uint64]Order +func (m OrderMap) Backup() (orderForms []SubmitOrder) { + for _, order := range m { + orderForms = append(orderForms, order.Backup()) + } + + return orderForms +} + func (m OrderMap) Add(o Order) { m[o.OrderID] = o } @@ -61,15 +72,28 @@ func (m OrderMap) Orders() (orders OrderSlice) { type SyncOrderMap struct { orders OrderMap + // pendingRemoval is for recording the order remove message for unknown orders. 
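The IsValid change above now returns (bool, error) so callers can tell why a book is unusable (empty side vs. crossed book). A short sketch of the crossed-book case, using invented prices:

    package main

    import (
        "fmt"

        "github.com/c9s/bbgo/pkg/fixedpoint"
        "github.com/c9s/bbgo/pkg/types"
    )

    func main() {
        // A crossed book (best bid above best ask) is reported as invalid,
        // and the error explains which prices crossed.
        ob := types.SliceOrderBook{
            Bids: types.PriceVolumeSlice{
                {Price: fixedpoint.NewFromFloat(110.0), Volume: fixedpoint.One},
            },
            Asks: types.PriceVolumeSlice{
                {Price: fixedpoint.NewFromFloat(100.0), Volume: fixedpoint.One},
            },
        }

        ok, err := ob.IsValid()
        fmt.Println(ok)  // false
        fmt.Println(err) // e.g. "bid price 110 > ask price 100"
    }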
+ // the order removal message might arrive before the order update, so if we found there is a pending removal, + // we should not keep the order in the order map + pendingRemoval map[uint64]time.Time + sync.RWMutex } func NewSyncOrderMap() *SyncOrderMap { return &SyncOrderMap{ - orders: make(OrderMap), + orders: make(OrderMap), + pendingRemoval: make(map[uint64]time.Time, 10), } } +func (m *SyncOrderMap) Backup() (orders []SubmitOrder) { + m.Lock() + orders = m.orders.Backup() + m.Unlock() + return orders +} + func (m *SyncOrderMap) Remove(orderID uint64) (exists bool) { m.Lock() defer m.Unlock() @@ -77,6 +101,8 @@ func (m *SyncOrderMap) Remove(orderID uint64) (exists bool) { exists = m.orders.Exists(orderID) if exists { m.orders.Remove(orderID) + } else { + m.pendingRemoval[orderID] = time.Now() } return exists @@ -87,50 +113,62 @@ func (m *SyncOrderMap) Add(o Order) { defer m.Unlock() m.orders.Add(o) + + if len(m.pendingRemoval) > 0 { + expireTime := time.Now().Add(-5 * time.Minute) + removing := make(map[uint64]struct{}) + for orderID, creationTime := range m.pendingRemoval { + if m.orders.Exists(orderID) || creationTime.Before(expireTime) { + m.orders.Remove(orderID) + removing[orderID] = struct{}{} + } + } + + for orderID := range removing { + delete(m.pendingRemoval, orderID) + } + } } func (m *SyncOrderMap) Update(o Order) { m.Lock() - defer m.Unlock() - m.orders.Update(o) + m.Unlock() } func (m *SyncOrderMap) Iterate(it func(id uint64, order Order) bool) { m.Lock() - defer m.Unlock() - for id := range m.orders { if it(id, m.orders[id]) { break } } + m.Unlock() } -func (m *SyncOrderMap) Exists(orderID uint64) bool { - m.RLock() - defer m.RUnlock() - - return m.orders.Exists(orderID) +func (m *SyncOrderMap) Exists(orderID uint64) (exists bool) { + m.Lock() + exists = m.orders.Exists(orderID) + m.Unlock() + return exists } func (m *SyncOrderMap) Len() int { - m.RLock() - defer m.RUnlock() - + m.Lock() + defer m.Unlock() return len(m.orders) } -func (m *SyncOrderMap) IDs() []uint64 { - m.RLock() - defer m.RUnlock() - - return m.orders.IDs() +func (m *SyncOrderMap) IDs() (ids []uint64) { + m.Lock() + ids = m.orders.IDs() + m.Unlock() + return ids } func (m *SyncOrderMap) FindByStatus(status OrderStatus) OrderSlice { - m.RLock() - defer m.RUnlock() + m.Lock() + defer m.Unlock() return m.orders.FindByStatus(status) } @@ -141,8 +179,8 @@ func (m *SyncOrderMap) Filled() OrderSlice { // AnyFilled find any order is filled and stop iterating the order map func (m *SyncOrderMap) AnyFilled() (order Order, ok bool) { - m.RLock() - defer m.RUnlock() + m.Lock() + defer m.Unlock() for _, o := range m.orders { if o.Status == OrderStatusFilled { @@ -167,10 +205,3 @@ func (m *SyncOrderMap) Orders() (slice OrderSlice) { } type OrderSlice []Order - -func (s OrderSlice) IDs() (ids []uint64) { - for _, o := range s { - ids = append(ids, o.OrderID) - } - return ids -} diff --git a/pkg/types/plaintext.go b/pkg/types/plaintext.go index afcb6602a7..c4c2622b31 100644 --- a/pkg/types/plaintext.go +++ b/pkg/types/plaintext.go @@ -3,3 +3,7 @@ package types type PlainText interface { PlainText() string } + +type Stringer interface { + String() string +} diff --git a/pkg/types/position.go b/pkg/types/position.go new file mode 100644 index 0000000000..9a576d73b0 --- /dev/null +++ b/pkg/types/position.go @@ -0,0 +1,499 @@ +package types + +import ( + "fmt" + "sync" + "time" + + "github.com/slack-go/slack" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/util" +) + +type PositionType string + +const 
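The pendingRemoval bookkeeping added to SyncOrderMap above handles out-of-order exchange messages: a Remove for an order we have not seen yet is remembered, and the late Add is dropped instead of leaving a stale entry behind. A minimal sketch with an invented order ID:

    package main

    import (
        "fmt"

        "github.com/c9s/bbgo/pkg/types"
    )

    func main() {
        orders := types.NewSyncOrderMap()

        // The cancel/removal message arrives before the order update:
        // the ID is recorded in pendingRemoval rather than being lost.
        orders.Remove(1001)

        // When the late order update finally arrives, Add applies the pending
        // removal, so the canceled order does not linger in the map.
        orders.Add(types.Order{OrderID: 1001, Status: types.OrderStatusCanceled})

        fmt.Println(orders.Exists(1001)) // false
        fmt.Println(orders.Len())        // 0
    }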
( + PositionShort = PositionType("Short") + PositionLong = PositionType("Long") + PositionClosed = PositionType("Closed") +) + +type ExchangeFee struct { + MakerFeeRate fixedpoint.Value + TakerFeeRate fixedpoint.Value +} + +type PositionRisk struct { + Leverage fixedpoint.Value `json:"leverage"` + LiquidationPrice fixedpoint.Value `json:"liquidationPrice"` +} + +type Position struct { + Symbol string `json:"symbol" db:"symbol"` + BaseCurrency string `json:"baseCurrency" db:"base"` + QuoteCurrency string `json:"quoteCurrency" db:"quote"` + + Market Market `json:"market,omitempty"` + + Base fixedpoint.Value `json:"base" db:"base"` + Quote fixedpoint.Value `json:"quote" db:"quote"` + AverageCost fixedpoint.Value `json:"averageCost" db:"average_cost"` + + // ApproximateAverageCost adds the computed fee in quote in the average cost + // This is used for calculating net profit + ApproximateAverageCost fixedpoint.Value `json:"approximateAverageCost"` + + FeeRate *ExchangeFee `json:"feeRate,omitempty"` + ExchangeFeeRates map[ExchangeName]ExchangeFee `json:"exchangeFeeRates"` + + // TotalFee stores the fee currency -> total fee quantity + TotalFee map[string]fixedpoint.Value `json:"totalFee" db:"-"` + + ChangedAt time.Time `json:"changedAt,omitempty" db:"changed_at"` + + Strategy string `json:"strategy,omitempty" db:"strategy"` + StrategyInstanceID string `json:"strategyInstanceID,omitempty" db:"strategy_instance_id"` + + AccumulatedProfit fixedpoint.Value `json:"accumulatedProfit,omitempty" db:"accumulated_profit"` + + sync.Mutex +} + +func (p *Position) CsvHeader() []string { + return []string{ + "symbol", + "time", + "average_cost", + "base", + "quote", + "accumulated_profit", + } +} + +func (p *Position) CsvRecords() [][]string { + if p.AverageCost.IsZero() && p.Base.IsZero() { + return nil + } + + return [][]string{ + { + p.Symbol, + p.ChangedAt.UTC().Format(time.RFC1123), + p.AverageCost.String(), + p.Base.String(), + p.Quote.String(), + p.AccumulatedProfit.String(), + }, + } +} + +// NewProfit generates the profit object from the current position +func (p *Position) NewProfit(trade Trade, profit, netProfit fixedpoint.Value) Profit { + return Profit{ + Symbol: p.Symbol, + QuoteCurrency: p.QuoteCurrency, + BaseCurrency: p.BaseCurrency, + AverageCost: p.AverageCost, + // profit related fields + Profit: profit, + NetProfit: netProfit, + ProfitMargin: profit.Div(trade.QuoteQuantity), + NetProfitMargin: netProfit.Div(trade.QuoteQuantity), + // trade related fields + TradeID: trade.ID, + Side: trade.Side, + IsBuyer: trade.IsBuyer, + IsMaker: trade.IsMaker, + Price: trade.Price, + Quantity: trade.Quantity, + QuoteQuantity: trade.QuoteQuantity, + // FeeInUSD: 0, + Fee: trade.Fee, + FeeCurrency: trade.FeeCurrency, + + Exchange: trade.Exchange, + IsMargin: trade.IsMargin, + IsFutures: trade.IsFutures, + IsIsolated: trade.IsIsolated, + TradedAt: trade.Time.Time(), + Strategy: p.Strategy, + StrategyInstanceID: p.StrategyInstanceID, + } +} + +func (p *Position) NewMarketCloseOrder(percentage fixedpoint.Value) *SubmitOrder { + base := p.GetBase() + + quantity := base.Abs() + if percentage.Compare(fixedpoint.One) < 0 { + quantity = quantity.Mul(percentage) + } + + if quantity.Compare(p.Market.MinQuantity) < 0 { + return nil + } + + side := SideTypeSell + sign := base.Sign() + if sign == 0 { + return nil + } else if sign < 0 { + side = SideTypeBuy + } + + return &SubmitOrder{ + Symbol: p.Symbol, + Market: p.Market, + Type: OrderTypeMarket, + Side: side, + Quantity: quantity, + } +} + +func (p *Position) 
IsDust(price fixedpoint.Value) bool { + base := p.GetBase().Abs() + return p.Market.IsDustQuantity(base, price) +} + +// GetBase locks the mutex and return the base quantity +// The base quantity can be negative +func (p *Position) GetBase() (base fixedpoint.Value) { + p.Lock() + base = p.Base + p.Unlock() + return base +} + +func (p *Position) UnrealizedProfit(price fixedpoint.Value) fixedpoint.Value { + base := p.GetBase() + + if p.IsLong() { + return price.Sub(p.AverageCost).Mul(base) + } else if p.IsShort() { + return p.AverageCost.Sub(price).Mul(base) + } + + return fixedpoint.Zero +} + +type FuturesPosition struct { + Symbol string `json:"symbol"` + BaseCurrency string `json:"baseCurrency"` + QuoteCurrency string `json:"quoteCurrency"` + + Market Market `json:"market"` + + Base fixedpoint.Value `json:"base"` + Quote fixedpoint.Value `json:"quote"` + AverageCost fixedpoint.Value `json:"averageCost"` + + // ApproximateAverageCost adds the computed fee in quote in the average cost + // This is used for calculating net profit + ApproximateAverageCost fixedpoint.Value `json:"approximateAverageCost"` + + FeeRate *ExchangeFee `json:"feeRate,omitempty"` + ExchangeFeeRates map[ExchangeName]ExchangeFee `json:"exchangeFeeRates"` + + // Futures data fields + Isolated bool `json:"isolated"` + UpdateTime int64 `json:"updateTime"` + PositionRisk *PositionRisk + + sync.Mutex +} + +func NewPositionFromMarket(market Market) *Position { + return &Position{ + Symbol: market.Symbol, + BaseCurrency: market.BaseCurrency, + QuoteCurrency: market.QuoteCurrency, + Market: market, + TotalFee: make(map[string]fixedpoint.Value), + } +} + +func NewPosition(symbol, base, quote string) *Position { + return &Position{ + Symbol: symbol, + BaseCurrency: base, + QuoteCurrency: quote, + TotalFee: make(map[string]fixedpoint.Value), + } +} + +func (p *Position) addTradeFee(trade Trade) { + if p.TotalFee == nil { + p.TotalFee = make(map[string]fixedpoint.Value) + } + p.TotalFee[trade.FeeCurrency] = p.TotalFee[trade.FeeCurrency].Add(trade.Fee) +} + +func (p *Position) Reset() { + p.Base = fixedpoint.Zero + p.Quote = fixedpoint.Zero + p.AverageCost = fixedpoint.Zero +} + +func (p *Position) SetFeeRate(exchangeFee ExchangeFee) { + p.FeeRate = &exchangeFee +} + +func (p *Position) SetExchangeFeeRate(ex ExchangeName, exchangeFee ExchangeFee) { + if p.ExchangeFeeRates == nil { + p.ExchangeFeeRates = make(map[ExchangeName]ExchangeFee) + } + + p.ExchangeFeeRates[ex] = exchangeFee +} + +func (p *Position) IsShort() bool { + return p.Base.Sign() < 0 +} + +func (p *Position) IsLong() bool { + return p.Base.Sign() > 0 +} + +func (p *Position) IsClosed() bool { + return p.Base.Sign() == 0 +} + +func (p *Position) Type() PositionType { + if p.Base.Sign() > 0 { + return PositionLong + } else if p.Base.Sign() < 0 { + return PositionShort + } + return PositionClosed +} + +func (p *Position) SlackAttachment() slack.Attachment { + p.Lock() + defer p.Unlock() + + averageCost := p.AverageCost + base := p.Base + quote := p.Quote + + var posType = p.Type() + var color = "" + + sign := p.Base.Sign() + if sign == 0 { + color = "#cccccc" + } else if sign > 0 { + color = "#228B22" + } else if sign < 0 { + color = "#DC143C" + } + + title := util.Render(string(posType)+` Position {{ .Symbol }} `, p) + + fields := []slack.AttachmentField{ + {Title: "Average Cost", Value: averageCost.String() + " " + p.QuoteCurrency, Short: true}, + {Title: p.BaseCurrency, Value: base.String(), Short: true}, + {Title: p.QuoteCurrency, Value: quote.String()}, + } + + if 
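Position.NewMarketCloseOrder above builds a market order that closes all or part of the current position, returning nil when the resulting quantity would be below the market's minimum. A sketch under the assumption that types.Market exposes Symbol, BaseCurrency, QuoteCurrency and MinQuantity as shown here; the symbol, prices and percentage are made up:

    package main

    import (
        "fmt"

        "github.com/c9s/bbgo/pkg/fixedpoint"
        "github.com/c9s/bbgo/pkg/types"
    )

    func main() {
        market := types.Market{
            Symbol:        "ETHUSDT",
            BaseCurrency:  "ETH",
            QuoteCurrency: "USDT",
            MinQuantity:   fixedpoint.MustNewFromString("0.001"),
        }

        pos := types.NewPositionFromMarket(market)

        // Open a 1 ETH long position at 1800 USDT.
        pos.AddTrade(types.Trade{
            Symbol:        "ETHUSDT",
            Side:          types.SideTypeBuy,
            Price:         fixedpoint.NewFromInt(1800),
            Quantity:      fixedpoint.One,
            QuoteQuantity: fixedpoint.NewFromInt(1800),
        })

        // Close half of the long position with a market sell order.
        if order := pos.NewMarketCloseOrder(fixedpoint.MustNewFromString("0.5")); order != nil {
            fmt.Println(order.Side, order.Quantity.String()) // SELL 0.5
        }
    }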
p.TotalFee != nil { + for feeCurrency, fee := range p.TotalFee { + if fee.Sign() > 0 { + fields = append(fields, slack.AttachmentField{ + Title: fmt.Sprintf("Fee (%s)", feeCurrency), + Value: fee.String(), + Short: true, + }) + } + } + } + + return slack.Attachment{ + // Pretext: "", + // Text: text, + Title: title, + Color: color, + Fields: fields, + Footer: util.Render("update time {{ . }}", time.Now().Format(time.RFC822)), + // FooterIcon: "", + } +} + +func (p *Position) PlainText() (msg string) { + posType := p.Type() + msg = fmt.Sprintf("%s Position %s: average cost = %v, base = %v, quote = %v", + posType, + p.Symbol, + p.AverageCost, + p.Base, + p.Quote, + ) + + if p.TotalFee != nil { + for feeCurrency, fee := range p.TotalFee { + msg += fmt.Sprintf("\nfee (%s) = %v", feeCurrency, fee) + } + } + + return msg +} + +func (p *Position) String() string { + return fmt.Sprintf("POSITION %s: average cost = %v, base = %v, quote = %v", + p.Symbol, + p.AverageCost, + p.Base, + p.Quote, + ) +} + +func (p *Position) BindStream(stream Stream) { + stream.OnTradeUpdate(func(trade Trade) { + if p.Symbol == trade.Symbol { + p.AddTrade(trade) + } + }) +} + +func (p *Position) AddTrades(trades []Trade) (fixedpoint.Value, fixedpoint.Value, bool) { + var totalProfitAmount, totalNetProfit fixedpoint.Value + for _, trade := range trades { + if profit, netProfit, madeProfit := p.AddTrade(trade); madeProfit { + totalProfitAmount = totalProfitAmount.Add(profit) + totalNetProfit = totalNetProfit.Add(netProfit) + } + } + + return totalProfitAmount, totalNetProfit, !totalProfitAmount.IsZero() +} + +func (p *Position) AddTrade(td Trade) (profit fixedpoint.Value, netProfit fixedpoint.Value, madeProfit bool) { + price := td.Price + quantity := td.Quantity + quoteQuantity := td.QuoteQuantity + fee := td.Fee + + // calculated fee in quote (some exchange accounts may enable platform currency fee discount, like BNB) + // convert platform fee token into USD values + var feeInQuote = fixedpoint.Zero + + switch td.FeeCurrency { + + case p.BaseCurrency: + if !td.IsFutures { + quantity = quantity.Sub(fee) + } + + case p.QuoteCurrency: + if !td.IsFutures { + quoteQuantity = quoteQuantity.Sub(fee) + } + + default: + if !td.Fee.IsZero() { + if p.ExchangeFeeRates != nil { + if exchangeFee, ok := p.ExchangeFeeRates[td.Exchange]; ok { + if td.IsMaker { + feeInQuote = feeInQuote.Add(exchangeFee.MakerFeeRate.Mul(quoteQuantity)) + } else { + feeInQuote = feeInQuote.Add(exchangeFee.TakerFeeRate.Mul(quoteQuantity)) + } + } + } else if p.FeeRate != nil { + if td.IsMaker { + feeInQuote = feeInQuote.Add(p.FeeRate.MakerFeeRate.Mul(quoteQuantity)) + } else { + feeInQuote = feeInQuote.Add(p.FeeRate.TakerFeeRate.Mul(quoteQuantity)) + } + } + } + } + + p.Lock() + defer p.Unlock() + + // update changedAt field before we unlock in the defer func + defer func() { + p.ChangedAt = td.Time.Time() + }() + + p.addTradeFee(td) + + // Base > 0 means we're in long position + // Base < 0 means we're in short position + switch td.Side { + + case SideTypeBuy: + if p.Base.Sign() < 0 { + // convert short position to long position + if p.Base.Add(quantity).Sign() > 0 { + profit = p.AverageCost.Sub(price).Mul(p.Base.Neg()) + netProfit = p.ApproximateAverageCost.Sub(price).Mul(p.Base.Neg()).Sub(feeInQuote) + p.Base = p.Base.Add(quantity) + p.Quote = p.Quote.Sub(quoteQuantity) + p.AverageCost = price + p.ApproximateAverageCost = price + p.AccumulatedProfit = p.AccumulatedProfit.Add(profit) + return profit, netProfit, true + } else { + // covering short position 
+ p.Base = p.Base.Add(quantity) + p.Quote = p.Quote.Sub(quoteQuantity) + profit = p.AverageCost.Sub(price).Mul(quantity) + netProfit = p.ApproximateAverageCost.Sub(price).Mul(quantity).Sub(feeInQuote) + p.AccumulatedProfit = p.AccumulatedProfit.Add(profit) + return profit, netProfit, true + } + } + + divisor := p.Base.Add(quantity) + p.ApproximateAverageCost = p.ApproximateAverageCost.Mul(p.Base). + Add(quoteQuantity). + Add(feeInQuote). + Div(divisor) + p.AverageCost = p.AverageCost.Mul(p.Base).Add(quoteQuantity).Div(divisor) + p.Base = p.Base.Add(quantity) + p.Quote = p.Quote.Sub(quoteQuantity) + + return fixedpoint.Zero, fixedpoint.Zero, false + + case SideTypeSell: + if p.Base.Sign() > 0 { + // convert long position to short position + if p.Base.Compare(quantity) < 0 { + profit = price.Sub(p.AverageCost).Mul(p.Base) + netProfit = price.Sub(p.ApproximateAverageCost).Mul(p.Base).Sub(feeInQuote) + p.Base = p.Base.Sub(quantity) + p.Quote = p.Quote.Add(quoteQuantity) + p.AverageCost = price + p.ApproximateAverageCost = price + p.AccumulatedProfit = p.AccumulatedProfit.Add(profit) + return profit, netProfit, true + } else { + p.Base = p.Base.Sub(quantity) + p.Quote = p.Quote.Add(quoteQuantity) + profit = price.Sub(p.AverageCost).Mul(quantity) + netProfit = price.Sub(p.ApproximateAverageCost).Mul(quantity).Sub(feeInQuote) + p.AccumulatedProfit = p.AccumulatedProfit.Add(profit) + return profit, netProfit, true + } + } + + // handling short position, since Base here is negative we need to reverse the sign + divisor := quantity.Sub(p.Base) + p.ApproximateAverageCost = p.ApproximateAverageCost.Mul(p.Base.Neg()). + Add(quoteQuantity). + Sub(feeInQuote). + Div(divisor) + + p.AverageCost = p.AverageCost.Mul(p.Base.Neg()). + Add(quoteQuantity). + Div(divisor) + p.Base = p.Base.Sub(quantity) + p.Quote = p.Quote.Add(quoteQuantity) + + return fixedpoint.Zero, fixedpoint.Zero, false + } + + return fixedpoint.Zero, fixedpoint.Zero, false +} diff --git a/pkg/types/position_test.go b/pkg/types/position_test.go new file mode 100644 index 0000000000..904f3bb8d2 --- /dev/null +++ b/pkg/types/position_test.go @@ -0,0 +1,275 @@ +package types + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +const Delta = 1e-9 + +func TestPosition_ExchangeFeeRate_Short(t *testing.T) { + pos := &Position{ + Symbol: "BTCUSDT", + BaseCurrency: "BTC", + QuoteCurrency: "USDT", + } + + feeRate := fixedpoint.NewFromFloat(0.075 * 0.01) + pos.SetExchangeFeeRate(ExchangeBinance, ExchangeFee{ + MakerFeeRate: feeRate, + TakerFeeRate: feeRate, + }) + + quantity := fixedpoint.NewFromInt(10) + quoteQuantity := fixedpoint.NewFromInt(3000).Mul(quantity) + fee := quoteQuantity.Mul(feeRate) + averageCost := quoteQuantity.Sub(fee).Div(quantity) + bnbPrice := fixedpoint.NewFromInt(570) + pos.AddTrade(Trade{ + Exchange: ExchangeBinance, + Price: fixedpoint.NewFromInt(3000), + Quantity: quantity, + QuoteQuantity: quoteQuantity, + Symbol: "BTCUSDT", + Side: SideTypeSell, + Fee: fee.Div(bnbPrice), + FeeCurrency: "BNB", + }) + + _, netProfit, madeProfit := pos.AddTrade(Trade{ + Exchange: ExchangeBinance, + Price: fixedpoint.NewFromInt(2000), + Quantity: fixedpoint.NewFromInt(10), + QuoteQuantity: fixedpoint.NewFromInt(2000 * 10), + Symbol: "BTCUSDT", + Side: SideTypeBuy, + Fee: fixedpoint.NewFromInt(2000 * 10.0).Mul(feeRate).Div(bnbPrice), + FeeCurrency: "BNB", + }) + + expectedProfit := averageCost.Sub(fixedpoint.NewFromInt(2000)). + Mul(fixedpoint.NewFromInt(10)). 
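Position.AddTrade, shown above, returns (profit, netProfit, madeProfit) whenever a trade reduces or flips the position, which is how the realized PnL flows into the Profit records. A simple fee-free long round trip as a sketch, with invented prices:

    package main

    import (
        "fmt"

        "github.com/c9s/bbgo/pkg/fixedpoint"
        "github.com/c9s/bbgo/pkg/types"
    )

    func main() {
        pos := types.NewPosition("BTCUSDT", "BTC", "USDT")

        // Open a long position: buy 0.01 BTC at 1000 USDT.
        pos.AddTrade(types.Trade{
            Symbol:        "BTCUSDT",
            Side:          types.SideTypeBuy,
            Price:         fixedpoint.NewFromInt(1000),
            Quantity:      fixedpoint.NewFromFloat(0.01),
            QuoteQuantity: fixedpoint.NewFromFloat(10.0),
        })

        // Close it at 1500 USDT; the sell that reduces the long position
        // reports the realized profit against the average cost of 1000.
        profit, netProfit, made := pos.AddTrade(types.Trade{
            Symbol:        "BTCUSDT",
            Side:          types.SideTypeSell,
            Price:         fixedpoint.NewFromInt(1500),
            Quantity:      fixedpoint.NewFromFloat(0.01),
            QuoteQuantity: fixedpoint.NewFromFloat(15.0),
        })

        // Expect madeProfit=true and a realized profit of about 5 USDT
        // (net profit equals profit here because no fees were recorded).
        fmt.Println(made, profit.String(), netProfit.String())
    }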
+ Sub(fixedpoint.NewFromInt(2000).Mul(fixedpoint.NewFromInt(10)).Mul(feeRate)) + assert.True(t, madeProfit) + assert.Equal(t, expectedProfit, netProfit) +} + +func TestPosition_ExchangeFeeRate_Long(t *testing.T) { + pos := &Position{ + Symbol: "BTCUSDT", + BaseCurrency: "BTC", + QuoteCurrency: "USDT", + } + + feeRate := fixedpoint.NewFromFloat(0.075 * 0.01) + pos.SetExchangeFeeRate(ExchangeBinance, ExchangeFee{ + MakerFeeRate: feeRate, + TakerFeeRate: feeRate, + }) + + quantity := fixedpoint.NewFromInt(10) + quoteQuantity := fixedpoint.NewFromInt(3000).Mul(quantity) + fee := quoteQuantity.Mul(feeRate) + averageCost := quoteQuantity.Add(fee).Div(quantity) + bnbPrice := fixedpoint.NewFromInt(570) + pos.AddTrade(Trade{ + Exchange: ExchangeBinance, + Price: fixedpoint.NewFromInt(3000), + Quantity: quantity, + QuoteQuantity: quoteQuantity, + Symbol: "BTCUSDT", + Side: SideTypeBuy, + Fee: fee.Div(bnbPrice), + FeeCurrency: "BNB", + }) + + _, netProfit, madeProfit := pos.AddTrade(Trade{ + Exchange: ExchangeBinance, + Price: fixedpoint.NewFromInt(4000), + Quantity: fixedpoint.NewFromInt(10), + QuoteQuantity: fixedpoint.NewFromInt(4000).Mul(fixedpoint.NewFromInt(10)), + Symbol: "BTCUSDT", + Side: SideTypeSell, + Fee: fixedpoint.NewFromInt(40000).Mul(feeRate).Div(bnbPrice), + FeeCurrency: "BNB", + }) + + expectedProfit := fixedpoint.NewFromInt(4000). + Sub(averageCost).Mul(fixedpoint.NewFromInt(10)). + Sub(fixedpoint.NewFromInt(40000).Mul(feeRate)) + assert.True(t, madeProfit) + assert.Equal(t, expectedProfit, netProfit) +} + +func TestPosition(t *testing.T) { + var feeRate float64 = 0.05 * 0.01 + feeRateValue := fixedpoint.NewFromFloat(feeRate) + var testcases = []struct { + name string + trades []Trade + expectedAverageCost fixedpoint.Value + expectedBase fixedpoint.Value + expectedQuote fixedpoint.Value + expectedProfit fixedpoint.Value + }{ + { + name: "base fee", + trades: []Trade{ + { + Side: SideTypeBuy, + Price: fixedpoint.NewFromInt(1000), + Quantity: fixedpoint.NewFromFloat(0.01), + QuoteQuantity: fixedpoint.NewFromFloat(1000.0 * 0.01), + Fee: fixedpoint.MustNewFromString("0.000005"), // 0.01 * 0.05 * 0.01 + FeeCurrency: "BTC", + }, + }, + expectedAverageCost: fixedpoint.NewFromFloat(1000.0 * 0.01). + Div(fixedpoint.NewFromFloat(0.01).Mul(fixedpoint.One.Sub(feeRateValue))), + expectedBase: fixedpoint.NewFromFloat(0.01). + Sub(fixedpoint.NewFromFloat(0.01).Mul(feeRateValue)), + expectedQuote: fixedpoint.NewFromFloat(0 - 1000.0*0.01), + expectedProfit: fixedpoint.Zero, + }, + { + name: "quote fee", + trades: []Trade{ + { + Side: SideTypeSell, + Price: fixedpoint.NewFromInt(1000), + Quantity: fixedpoint.NewFromFloat(0.01), + QuoteQuantity: fixedpoint.NewFromFloat(1000.0 * 0.01), + Fee: fixedpoint.NewFromFloat((1000.0 * 0.01) * feeRate), // 0.05% + FeeCurrency: "USDT", + }, + }, + expectedAverageCost: fixedpoint.NewFromFloat(1000.0 * 0.01). + Mul(fixedpoint.One.Sub(feeRateValue)). 
+ Div(fixedpoint.NewFromFloat(0.01)), + expectedBase: fixedpoint.NewFromFloat(-0.01), + expectedQuote: fixedpoint.NewFromFloat(0.0 + 1000.0*0.01*(1.0-feeRate)), + expectedProfit: fixedpoint.Zero, + }, + { + name: "long", + trades: []Trade{ + { + Side: SideTypeBuy, + Price: fixedpoint.NewFromInt(1000), + Quantity: fixedpoint.NewFromFloat(0.01), + QuoteQuantity: fixedpoint.NewFromFloat(1000.0 * 0.01), + }, + { + Side: SideTypeBuy, + Price: fixedpoint.NewFromInt(2000), + Quantity: fixedpoint.MustNewFromString("0.03"), + QuoteQuantity: fixedpoint.NewFromFloat(2000.0 * 0.03), + }, + }, + expectedAverageCost: fixedpoint.NewFromFloat((1000.0*0.01 + 2000.0*0.03) / 0.04), + expectedBase: fixedpoint.NewFromFloat(0.01 + 0.03), + expectedQuote: fixedpoint.NewFromFloat(0 - 1000.0*0.01 - 2000.0*0.03), + expectedProfit: fixedpoint.Zero, + }, + + { + name: "long and sell", + trades: []Trade{ + { + Side: SideTypeBuy, + Price: fixedpoint.NewFromInt(1000), + Quantity: fixedpoint.NewFromFloat(0.01), + QuoteQuantity: fixedpoint.NewFromFloat(1000.0 * 0.01), + }, + { + Side: SideTypeBuy, + Price: fixedpoint.NewFromInt(2000), + Quantity: fixedpoint.MustNewFromString("0.03"), + QuoteQuantity: fixedpoint.NewFromFloat(2000.0 * 0.03), + }, + { + Side: SideTypeSell, + Price: fixedpoint.NewFromInt(3000), + Quantity: fixedpoint.NewFromFloat(0.01), + QuoteQuantity: fixedpoint.NewFromFloat(3000.0 * 0.01), + }, + }, + expectedAverageCost: fixedpoint.NewFromFloat((1000.0*0.01 + 2000.0*0.03) / 0.04), + expectedBase: fixedpoint.MustNewFromString("0.03"), + expectedQuote: fixedpoint.NewFromFloat(0 - 1000.0*0.01 - 2000.0*0.03 + 3000.0*0.01), + expectedProfit: fixedpoint.NewFromFloat((3000.0 - (1000.0*0.01+2000.0*0.03)/0.04) * 0.01), + }, + + { + name: "long and sell to short", + trades: []Trade{ + { + Side: SideTypeBuy, + Price: fixedpoint.NewFromInt(1000), + Quantity: fixedpoint.NewFromFloat(0.01), + QuoteQuantity: fixedpoint.NewFromFloat(1000.0 * 0.01), + }, + { + Side: SideTypeBuy, + Price: fixedpoint.NewFromInt(2000), + Quantity: fixedpoint.MustNewFromString("0.03"), + QuoteQuantity: fixedpoint.NewFromFloat(2000.0 * 0.03), + }, + { + Side: SideTypeSell, + Price: fixedpoint.NewFromInt(3000), + Quantity: fixedpoint.NewFromFloat(0.10), + QuoteQuantity: fixedpoint.NewFromFloat(3000.0 * 0.10), + }, + }, + + expectedAverageCost: fixedpoint.NewFromInt(3000), + expectedBase: fixedpoint.MustNewFromString("-0.06"), + expectedQuote: fixedpoint.NewFromFloat(-1000.0*0.01 - 2000.0*0.03 + 3000.0*0.1), + expectedProfit: fixedpoint.NewFromFloat((3000.0 - (1000.0*0.01+2000.0*0.03)/0.04) * 0.04), + }, + + { + name: "short", + trades: []Trade{ + { + Side: SideTypeSell, + Price: fixedpoint.NewFromInt(2000), + Quantity: fixedpoint.NewFromFloat(0.01), + QuoteQuantity: fixedpoint.NewFromFloat(2000.0 * 0.01), + }, + { + Side: SideTypeSell, + Price: fixedpoint.NewFromInt(3000), + Quantity: fixedpoint.MustNewFromString("0.03"), + QuoteQuantity: fixedpoint.NewFromFloat(3000.0 * 0.03), + }, + }, + + expectedAverageCost: fixedpoint.NewFromFloat((2000.0*0.01 + 3000.0*0.03) / (0.01 + 0.03)), + expectedBase: fixedpoint.NewFromFloat(0 - 0.01 - 0.03), + expectedQuote: fixedpoint.NewFromFloat(2000.0*0.01 + 3000.0*0.03), + expectedProfit: fixedpoint.Zero, + }, + } + + for _, testcase := range testcases { + t.Run(testcase.name, func(t *testing.T) { + pos := Position{ + Symbol: "BTCUSDT", + BaseCurrency: "BTC", + QuoteCurrency: "USDT", + } + profitAmount, _, profit := pos.AddTrades(testcase.trades) + assert.Equal(t, testcase.expectedQuote, pos.Quote, 
"expectedQuote") + assert.Equal(t, testcase.expectedBase, pos.Base, "expectedBase") + assert.Equal(t, testcase.expectedAverageCost, pos.AverageCost, "expectedAverageCost") + if profit { + assert.Equal(t, testcase.expectedProfit, profitAmount, "expectedProfit") + } + }) + } +} diff --git a/pkg/types/premiumindex.go b/pkg/types/premiumindex.go new file mode 100644 index 0000000000..c9ffcd0aab --- /dev/null +++ b/pkg/types/premiumindex.go @@ -0,0 +1,15 @@ +package types + +import ( + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type PremiumIndex struct { + Symbol string `json:"symbol"` + MarkPrice fixedpoint.Value `json:"markPrice"` + LastFundingRate fixedpoint.Value `json:"lastFundingRate"` + NextFundingTime time.Time `json:"nextFundingTime"` + Time time.Time `json:"time"` +} diff --git a/pkg/types/price_volume_heartbeat.go b/pkg/types/price_volume_heartbeat.go new file mode 100644 index 0000000000..51b4a5bfac --- /dev/null +++ b/pkg/types/price_volume_heartbeat.go @@ -0,0 +1,31 @@ +package types + +import ( + "fmt" + "time" +) + +// PriceHeartBeat is used for monitoring the price volume update. +type PriceHeartBeat struct { + PriceVolume PriceVolume + LastTime time.Time +} + +// Update updates the price volume object and the last update time +// It returns (bool, error), when the price is successfully updated, it returns true. +// If the price is not updated (same price) and the last time exceeded the timeout, +// Then false, and an error will be returned +func (b *PriceHeartBeat) Update(pv PriceVolume, timeout time.Duration) (bool, error) { + if b.PriceVolume.Price.IsZero() || b.PriceVolume != pv { + b.PriceVolume = pv + b.LastTime = time.Now() + return true, nil // successfully updated + } else if time.Since(b.LastTime) > timeout { + return false, fmt.Errorf("price %s has not been updating for %s, last update: %s, skip quoting", + b.PriceVolume.String(), + time.Since(b.LastTime), + b.LastTime) + } + + return false, nil +} diff --git a/pkg/types/price_volume_heartbeat_test.go b/pkg/types/price_volume_heartbeat_test.go new file mode 100644 index 0000000000..16d0c28784 --- /dev/null +++ b/pkg/types/price_volume_heartbeat_test.go @@ -0,0 +1,29 @@ +package types + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +func TestPriceHeartBeat_Update(t *testing.T) { + hb := PriceHeartBeat{} + updated, err := hb.Update(PriceVolume{Price: fixedpoint.NewFromFloat(22.0), Volume: fixedpoint.NewFromFloat(100.0)}, time.Minute) + assert.NoError(t, err) + assert.True(t, updated) + + updated, err = hb.Update(PriceVolume{Price: fixedpoint.NewFromFloat(22.0), Volume: fixedpoint.NewFromFloat(100.0)}, time.Minute) + assert.NoError(t, err) + assert.False(t, updated, "should not be updated when pv is not changed") + + updated, err = hb.Update(PriceVolume{Price: fixedpoint.NewFromFloat(23.0), Volume: fixedpoint.NewFromFloat(100.0)}, time.Minute) + assert.NoError(t, err) + assert.True(t, updated, "should be updated when the price is changed") + + updated, err = hb.Update(PriceVolume{Price: fixedpoint.NewFromFloat(23.0), Volume: fixedpoint.NewFromFloat(200.0)}, time.Minute) + assert.NoError(t, err) + assert.True(t, updated, "should be updated when the volume is changed") +} diff --git a/pkg/types/price_volume_slice.go b/pkg/types/price_volume_slice.go new file mode 100644 index 0000000000..a7863702fe --- /dev/null +++ b/pkg/types/price_volume_slice.go @@ -0,0 +1,166 @@ +package types + +import ( + "encoding/json" + "fmt" + "sort" + + 
"github.com/c9s/bbgo/pkg/fixedpoint" +) + +type PriceVolume struct { + Price, Volume fixedpoint.Value +} + +func (p PriceVolume) String() string { + return fmt.Sprintf("PriceVolume{ price: %s, volume: %s }", p.Price.String(), p.Volume.String()) +} + +type PriceVolumeSlice []PriceVolume + +func (slice PriceVolumeSlice) Len() int { return len(slice) } +func (slice PriceVolumeSlice) Less(i, j int) bool { return slice[i].Price.Compare(slice[j].Price) < 0 } +func (slice PriceVolumeSlice) Swap(i, j int) { slice[i], slice[j] = slice[j], slice[i] } + +// Trim removes the pairs that volume = 0 +func (slice PriceVolumeSlice) Trim() (pvs PriceVolumeSlice) { + for _, pv := range slice { + if pv.Volume.Sign() > 0 { + pvs = append(pvs, pv) + } + } + + return pvs +} + +func (slice PriceVolumeSlice) CopyDepth(depth int) PriceVolumeSlice { + if depth > len(slice) { + return slice.Copy() + } + + var s = make(PriceVolumeSlice, depth) + copy(s, slice[:depth]) + return s +} + +func (slice PriceVolumeSlice) Copy() PriceVolumeSlice { + var s = make(PriceVolumeSlice, len(slice)) + copy(s, slice) + return s +} + +func (slice PriceVolumeSlice) Second() (PriceVolume, bool) { + if len(slice) > 1 { + return slice[1], true + } + return PriceVolume{}, false +} + +func (slice PriceVolumeSlice) First() (PriceVolume, bool) { + if len(slice) > 0 { + return slice[0], true + } + return PriceVolume{}, false +} + +func (slice PriceVolumeSlice) IndexByVolumeDepth(requiredVolume fixedpoint.Value) int { + var tv fixedpoint.Value = fixedpoint.Zero + for x, el := range slice { + tv = tv.Add(el.Volume) + if tv.Compare(requiredVolume) >= 0 { + return x + } + } + + // not deep enough + return -1 +} + +func (slice PriceVolumeSlice) InsertAt(idx int, pv PriceVolume) PriceVolumeSlice { + rear := append([]PriceVolume{}, slice[idx:]...) + newSlice := append(slice[:idx], pv) + return append(newSlice, rear...) +} + +func (slice PriceVolumeSlice) Remove(price fixedpoint.Value, descending bool) PriceVolumeSlice { + matched, idx := slice.Find(price, descending) + if matched.Price.Compare(price) != 0 || matched.Price.IsZero() { + return slice + } + + return append(slice[:idx], slice[idx+1:]...) +} + +// Find finds the pair by the given price, this function is a read-only +// operation, so we use the value receiver to avoid copy value from the pointer +// If the price is not found, it will return the index where the price can be inserted at. 
+// true for descending (bid orders), false for ascending (ask orders) +func (slice PriceVolumeSlice) Find(price fixedpoint.Value, descending bool) (pv PriceVolume, idx int) { + idx = sort.Search(len(slice), func(i int) bool { + if descending { + return slice[i].Price.Compare(price) <= 0 + } + return slice[i].Price.Compare(price) >= 0 + }) + + if idx >= len(slice) || slice[idx].Price.Compare(price) != 0 { + return pv, idx + } + + pv = slice[idx] + + return pv, idx +} + +func (slice PriceVolumeSlice) Upsert(pv PriceVolume, descending bool) PriceVolumeSlice { + if len(slice) == 0 { + return append(slice, pv) + } + + price := pv.Price + _, idx := slice.Find(price, descending) + if idx >= len(slice) || slice[idx].Price.Compare(price) != 0 { + return slice.InsertAt(idx, pv) + } + + slice[idx].Volume = pv.Volume + return slice +} + +func (slice *PriceVolumeSlice) UnmarshalJSON(b []byte) error { + s, err := ParsePriceVolumeSliceJSON(b) + if err != nil { + return err + } + + *slice = s + return nil +} + +// ParsePriceVolumeSliceJSON tries to parse a 2 dimensional string array into a PriceVolumeSlice +// +// [["9000", "10"], ["9900", "10"], ... ] +// +func ParsePriceVolumeSliceJSON(b []byte) (slice PriceVolumeSlice, err error) { + var as [][]fixedpoint.Value + + err = json.Unmarshal(b, &as) + if err != nil { + return slice, err + } + + for _, a := range as { + var pv PriceVolume + pv.Price = a[0] + pv.Volume = a[1] + + // kucoin returns price in 0, we should skip + if pv.Price.Eq(fixedpoint.Zero) { + continue + } + + slice = append(slice, pv) + } + + return slice, nil +} diff --git a/pkg/types/price_volume_slice_test.go b/pkg/types/price_volume_slice_test.go new file mode 100644 index 0000000000..cf0b1e8ab5 --- /dev/null +++ b/pkg/types/price_volume_slice_test.go @@ -0,0 +1,30 @@ +package types + +import ( + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/stretchr/testify/assert" +) + +func TestPriceVolumeSlice_Remove(t *testing.T) { + for _, descending := range []bool{true, false} { + slice := PriceVolumeSlice{} + slice = slice.Upsert(PriceVolume{Price: fixedpoint.One}, descending) + slice = slice.Upsert(PriceVolume{Price: fixedpoint.NewFromInt(3)}, descending) + slice = slice.Upsert(PriceVolume{Price: fixedpoint.NewFromInt(5)}, descending) + assert.Equal(t, 3, len(slice), "with descending %v", descending) + + slice = slice.Remove(fixedpoint.NewFromInt(2), descending) + assert.Equal(t, 3, len(slice), "with descending %v", descending) + + slice = slice.Remove(fixedpoint.NewFromInt(3), descending) + assert.Equal(t, 2, len(slice), "with descending %v", descending) + + slice = slice.Remove(fixedpoint.NewFromInt(99), descending) + assert.Equal(t, 2, len(slice), "with descending %v", descending) + + slice = slice.Remove(fixedpoint.Zero, descending) + assert.Equal(t, 2, len(slice), "with descending %v", descending) + } +} diff --git a/pkg/types/profit.go b/pkg/types/profit.go new file mode 100644 index 0000000000..f783f996a9 --- /dev/null +++ b/pkg/types/profit.go @@ -0,0 +1,374 @@ +package types + +import ( + "fmt" + "time" + + "github.com/slack-go/slack" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/c9s/bbgo/pkg/util" +) + +// Profit struct stores the PnL information +type Profit struct { + // --- position related fields + // ------------------------------------------- + // Symbol is the symbol of the position + Symbol string `json:"symbol"` + QuoteCurrency string `json:"quoteCurrency" db:"quote_currency"` + BaseCurrency string `json:"baseCurrency" db:"base_currency"` + 
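ParsePriceVolumeSliceJSON above accepts the two-dimensional string arrays that exchange depth feeds send, skipping zero-price levels (as seen on kucoin), and the slice helpers such as IndexByVolumeDepth then operate on the parsed levels. A sketch with an invented payload:

    package main

    import (
        "fmt"

        "github.com/c9s/bbgo/pkg/fixedpoint"
        "github.com/c9s/bbgo/pkg/types"
    )

    func main() {
        // A typical depth payload: [[price, volume], ...] encoded as strings.
        payload := []byte(`[["9000", "10"], ["9900", "10"], ["0", "3"]]`)

        slice, err := types.ParsePriceVolumeSliceJSON(payload)
        if err != nil {
            panic(err)
        }

        // The zero-price level is skipped, so two levels remain.
        fmt.Println(len(slice)) // 2

        // IndexByVolumeDepth accumulates volume level by level and returns the
        // index where the required volume is reached (here: 10 + 10 >= 15).
        idx := slice.IndexByVolumeDepth(fixedpoint.NewFromInt(15))
        fmt.Println(idx) // 1
    }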
AverageCost fixedpoint.Value `json:"averageCost" db:"average_cost"` + + // profit related fields + // ------------------------------------------- + // Profit is the profit of this trade made. negative profit means loss. + Profit fixedpoint.Value `json:"profit" db:"profit"` + + // NetProfit is (profit - trading fee) + NetProfit fixedpoint.Value `json:"netProfit" db:"net_profit"` + + // ProfitMargin is a percentage of the profit and the capital amount + ProfitMargin fixedpoint.Value `json:"profitMargin" db:"profit_margin"` + + // NetProfitMargin is a percentage of the net profit and the capital amount + NetProfitMargin fixedpoint.Value `json:"netProfitMargin" db:"net_profit_margin"` + + // trade related fields + // -------------------------------------------- + // TradeID is the exchange trade id of that trade + TradeID uint64 `json:"tradeID" db:"trade_id"` + Side SideType `json:"side" db:"side"` + IsBuyer bool `json:"isBuyer" db:"is_buyer"` + IsMaker bool `json:"isMaker" db:"is_maker"` + Price fixedpoint.Value `json:"price" db:"price"` + Quantity fixedpoint.Value `json:"quantity" db:"quantity"` + QuoteQuantity fixedpoint.Value `json:"quoteQuantity" db:"quote_quantity"` + + // FeeInUSD is the summed fee of this profit, + // you will need to convert the trade fee into USD since the fee currencies can be different. + FeeInUSD fixedpoint.Value `json:"feeInUSD" db:"fee_in_usd"` + Fee fixedpoint.Value `json:"fee" db:"fee"` + FeeCurrency string `json:"feeCurrency" db:"fee_currency"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + IsMargin bool `json:"isMargin" db:"is_margin"` + IsFutures bool `json:"isFutures" db:"is_futures"` + IsIsolated bool `json:"isIsolated" db:"is_isolated"` + TradedAt time.Time `json:"tradedAt" db:"traded_at"` + + // strategy related fields + Strategy string `json:"strategy" db:"strategy"` + StrategyInstanceID string `json:"strategyInstanceID" db:"strategy_instance_id"` +} + +func (p *Profit) SlackAttachment() slack.Attachment { + var color = pnlColor(p.Profit) + var title = fmt.Sprintf("%s PnL ", p.Symbol) + title += pnlEmojiMargin(p.Profit, p.ProfitMargin, defaultPnlLevelResolution) + " " + title += pnlSignString(p.Profit) + " " + p.QuoteCurrency + + var fields []slack.AttachmentField + + if !p.NetProfit.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Net Profit", + Value: pnlSignString(p.NetProfit) + " " + p.QuoteCurrency, + Short: true, + }) + } + + if !p.ProfitMargin.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Profit Margin", + Value: p.ProfitMargin.Percentage(), + Short: true, + }) + } + + if !p.NetProfitMargin.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Net Profit Margin", + Value: p.NetProfitMargin.Percentage(), + Short: true, + }) + } + + if !p.QuoteQuantity.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Trade Amount", + Value: p.QuoteQuantity.String() + " " + p.QuoteCurrency, + Short: true, + }) + } + + if !p.FeeInUSD.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Fee In USD", + Value: p.FeeInUSD.String() + " USD", + Short: true, + }) + } + + if len(p.Strategy) != 0 { + fields = append(fields, slack.AttachmentField{ + Title: "Strategy", + Value: p.Strategy, + Short: true, + }) + } + + return slack.Attachment{ + Color: color, + Title: title, + Fields: fields, + // Footer: "", + } +} + +func (p *Profit) PlainText() string { + var emoji string + if !p.ProfitMargin.IsZero() { + emoji = pnlEmojiMargin(p.Profit, p.ProfitMargin, 
defaultPnlLevelResolution)
+	} else {
+		emoji = pnlEmojiSimple(p.Profit)
+	}
+
+	return fmt.Sprintf("%s trade profit %s %s %s (%s), net profit =~ %s %s (%s)",
+		p.Symbol,
+		emoji,
+		p.Profit.String(), p.QuoteCurrency,
+		p.ProfitMargin.Percentage(),
+		p.NetProfit.String(), p.QuoteCurrency,
+		p.NetProfitMargin.Percentage(),
+	)
+}
+
+var lossEmoji = "🔥"
+var profitEmoji = "💰"
+var defaultPnlLevelResolution = fixedpoint.NewFromFloat(0.001)
+
+func pnlColor(pnl fixedpoint.Value) string {
+	if pnl.Sign() > 0 {
+		return GreenColor
+	}
+	return RedColor
+}
+
+func pnlSignString(pnl fixedpoint.Value) string {
+	if pnl.Sign() > 0 {
+		return "+" + pnl.String()
+	}
+	return pnl.String()
+}
+
+func pnlEmojiSimple(pnl fixedpoint.Value) string {
+	if pnl.Sign() < 0 {
+		return lossEmoji
+	}
+
+	if pnl.IsZero() {
+		return ""
+	}
+
+	return profitEmoji
+}
+
+func pnlEmojiMargin(pnl, margin, resolution fixedpoint.Value) (out string) {
+	if margin.IsZero() {
+		return pnlEmojiSimple(pnl)
+	}
+
+	if pnl.Sign() < 0 {
+		out = lossEmoji
+		level := (margin.Neg()).Div(resolution).Int()
+		for i := 1; i < level; i++ {
+			out += lossEmoji
+		}
+		return out
+	}
+
+	if pnl.IsZero() {
+		return out
+	}
+
+	out = profitEmoji
+	level := margin.Div(resolution).Int()
+	for i := 1; i < level; i++ {
+		out += profitEmoji
+	}
+	return out
+}
+
+type ProfitStats struct {
+	Symbol        string `json:"symbol"`
+	QuoteCurrency string `json:"quoteCurrency"`
+	BaseCurrency  string `json:"baseCurrency"`
+
+	AccumulatedPnL       fixedpoint.Value `json:"accumulatedPnL,omitempty"`
+	AccumulatedNetProfit fixedpoint.Value `json:"accumulatedNetProfit,omitempty"`
+	AccumulatedProfit    fixedpoint.Value `json:"accumulatedProfit,omitempty"`
+	AccumulatedLoss      fixedpoint.Value `json:"accumulatedLoss,omitempty"`
+	AccumulatedVolume    fixedpoint.Value `json:"accumulatedVolume,omitempty"`
+	AccumulatedSince     int64            `json:"accumulatedSince,omitempty"`
+
+	TodayPnL       fixedpoint.Value `json:"todayPnL,omitempty"`
+	TodayNetProfit fixedpoint.Value `json:"todayNetProfit,omitempty"`
+	TodayProfit    fixedpoint.Value `json:"todayProfit,omitempty"`
+	TodayLoss      fixedpoint.Value `json:"todayLoss,omitempty"`
+	TodaySince     int64            `json:"todaySince,omitempty"`
+}
+
+func NewProfitStats(market Market) *ProfitStats {
+	return &ProfitStats{
+		Symbol:           market.Symbol,
+		BaseCurrency:     market.BaseCurrency,
+		QuoteCurrency:    market.QuoteCurrency,
+		AccumulatedSince: time.Now().Unix(),
+	}
+}
+
+func (s *ProfitStats) Init(market Market) {
+	s.Symbol = market.Symbol
+	s.BaseCurrency = market.BaseCurrency
+	s.QuoteCurrency = market.QuoteCurrency
+	if s.AccumulatedSince == 0 {
+		s.AccumulatedSince = time.Now().Unix()
+	}
+}
+
+func (s *ProfitStats) AddProfit(profit Profit) {
+	s.AccumulatedPnL = s.AccumulatedPnL.Add(profit.Profit)
+	s.AccumulatedNetProfit = s.AccumulatedNetProfit.Add(profit.NetProfit)
+
+	s.TodayPnL = s.TodayPnL.Add(profit.Profit)
+	s.TodayNetProfit = s.TodayNetProfit.Add(profit.NetProfit)
+
+	if profit.Profit.Sign() < 0 {
+		s.AccumulatedLoss = s.AccumulatedLoss.Add(profit.Profit)
+		s.TodayLoss = s.TodayLoss.Add(profit.Profit)
+	} else if profit.Profit.Sign() > 0 {
+		s.AccumulatedProfit = s.AccumulatedProfit.Add(profit.Profit)
+		s.TodayProfit = s.TodayProfit.Add(profit.Profit)
+	}
+}
+
+func (s *ProfitStats) AddTrade(trade Trade) {
+	if s.IsOver24Hours() {
+		s.ResetToday()
+	}
+
+	s.AccumulatedVolume = s.AccumulatedVolume.Add(trade.Quantity)
+}
+
+func (s *ProfitStats) IsOver24Hours() bool {
+	return time.Since(time.Unix(s.TodaySince, 0)) > 24*time.Hour
+}
+
+func (s *ProfitStats)
ResetToday() { + s.TodayPnL = fixedpoint.Zero + s.TodayNetProfit = fixedpoint.Zero + s.TodayProfit = fixedpoint.Zero + s.TodayLoss = fixedpoint.Zero + + var beginningOfTheDay = util.BeginningOfTheDay(time.Now().Local()) + s.TodaySince = beginningOfTheDay.Unix() +} + +func (s *ProfitStats) PlainText() string { + since := time.Unix(s.AccumulatedSince, 0).Local() + return fmt.Sprintf("%s Profit Today\n"+ + "Profit %s %s\n"+ + "Net profit %s %s\n"+ + "Trade Loss %s %s\n"+ + "Summary:\n"+ + "Accumulated Profit %s %s\n"+ + "Accumulated Net Profit %s %s\n"+ + "Accumulated Trade Loss %s %s\n"+ + "Since %s", + s.Symbol, + s.TodayPnL.String(), s.QuoteCurrency, + s.TodayNetProfit.String(), s.QuoteCurrency, + s.TodayLoss.String(), s.QuoteCurrency, + s.AccumulatedPnL.String(), s.QuoteCurrency, + s.AccumulatedNetProfit.String(), s.QuoteCurrency, + s.AccumulatedLoss.String(), s.QuoteCurrency, + since.Format(time.RFC822), + ) +} + +func (s *ProfitStats) SlackAttachment() slack.Attachment { + var color = pnlColor(s.AccumulatedPnL) + var title = fmt.Sprintf("%s Accumulated PnL %s %s", s.Symbol, pnlSignString(s.AccumulatedPnL), s.QuoteCurrency) + + since := time.Unix(s.AccumulatedSince, 0).Local() + title += " Since " + since.Format(time.RFC822) + + var fields []slack.AttachmentField + + if !s.TodayPnL.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "P&L Today", + Value: pnlSignString(s.TodayPnL) + " " + s.QuoteCurrency, + Short: true, + }) + } + + if !s.TodayProfit.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Profit Today", + Value: pnlSignString(s.TodayProfit) + " " + s.QuoteCurrency, + Short: true, + }) + } + + if !s.TodayNetProfit.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Net Profit Today", + Value: pnlSignString(s.TodayNetProfit) + " " + s.QuoteCurrency, + Short: true, + }) + } + + if !s.TodayLoss.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Loss Today", + Value: pnlSignString(s.TodayLoss) + " " + s.QuoteCurrency, + Short: true, + }) + } + + if !s.AccumulatedPnL.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Accumulated P&L", + Value: pnlSignString(s.AccumulatedPnL) + " " + s.QuoteCurrency, + }) + } + + if !s.AccumulatedProfit.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Accumulated Profit", + Value: pnlSignString(s.AccumulatedProfit) + " " + s.QuoteCurrency, + }) + } + + if !s.AccumulatedNetProfit.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Accumulated Net Profit", + Value: pnlSignString(s.AccumulatedNetProfit) + " " + s.QuoteCurrency, + }) + } + + if !s.AccumulatedLoss.IsZero() { + fields = append(fields, slack.AttachmentField{ + Title: "Accumulated Loss", + Value: pnlSignString(s.AccumulatedLoss) + " " + s.QuoteCurrency, + }) + } + + return slack.Attachment{ + Color: color, + Title: title, + Fields: fields, + // Footer: "", + } +} diff --git a/pkg/types/rbtorderbook.go b/pkg/types/rbtorderbook.go new file mode 100644 index 0000000000..d9fdd126ba --- /dev/null +++ b/pkg/types/rbtorderbook.go @@ -0,0 +1,199 @@ +package types + +import ( + "fmt" + "time" + + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +//go:generate callbackgen -type RBTOrderBook +type RBTOrderBook struct { + Symbol string + Bids *RBTree + Asks *RBTree + + lastUpdateTime time.Time + + loadCallbacks []func(book *RBTOrderBook) + updateCallbacks []func(book *RBTOrderBook) +} + +func NewRBOrderBook(symbol string) *RBTOrderBook { + return 
&RBTOrderBook{ + Symbol: symbol, + Bids: NewRBTree(), + Asks: NewRBTree(), + } +} + +func (b *RBTOrderBook) LastUpdateTime() time.Time { + return b.lastUpdateTime +} + +func (b *RBTOrderBook) BestBid() (PriceVolume, bool) { + right := b.Bids.Rightmost() + if right != nil { + return PriceVolume{Price: right.key, Volume: right.value}, true + } + + return PriceVolume{}, false +} + +func (b *RBTOrderBook) BestAsk() (PriceVolume, bool) { + left := b.Asks.Leftmost() + if left != nil { + return PriceVolume{Price: left.key, Volume: left.value}, true + } + + return PriceVolume{}, false +} + +func (b *RBTOrderBook) Spread() (fixedpoint.Value, bool) { + bestBid, ok := b.BestBid() + if !ok { + return fixedpoint.Zero, false + } + + bestAsk, ok := b.BestAsk() + if !ok { + return fixedpoint.Zero, false + } + + return bestAsk.Price.Sub(bestBid.Price), true +} + +func (b *RBTOrderBook) IsValid() (bool, error) { + bid, hasBid := b.BestBid() + ask, hasAsk := b.BestAsk() + + if !hasBid { + return false, errors.New("empty bids") + } + + if !hasAsk { + return false, errors.New("empty asks") + } + + if bid.Price.Compare(ask.Price) > 0 { + return false, fmt.Errorf("bid price %s > ask price %s", bid.Price.String(), ask.Price.String()) + } + + return true, nil +} + +func (b *RBTOrderBook) Load(book SliceOrderBook) { + b.Reset() + b.update(book) + b.EmitLoad(b) +} + +func (b *RBTOrderBook) Update(book SliceOrderBook) { + b.update(book) + b.EmitUpdate(b) +} + +func (b *RBTOrderBook) Reset() { + b.Bids = NewRBTree() + b.Asks = NewRBTree() +} + +func (b *RBTOrderBook) updateAsks(pvs PriceVolumeSlice) { + for _, pv := range pvs { + if pv.Volume.IsZero() { + b.Asks.Delete(pv.Price) + } else { + b.Asks.Upsert(pv.Price, pv.Volume) + } + } +} + +func (b *RBTOrderBook) updateBids(pvs PriceVolumeSlice) { + for _, pv := range pvs { + if pv.Volume.IsZero() { + b.Bids.Delete(pv.Price) + } else { + b.Bids.Upsert(pv.Price, pv.Volume) + } + } +} + +func (b *RBTOrderBook) update(book SliceOrderBook) { + b.updateBids(book.Bids) + b.updateAsks(book.Asks) + b.lastUpdateTime = time.Now() +} + +func (b *RBTOrderBook) load(book SliceOrderBook) { + b.Reset() + b.updateBids(book.Bids) + b.updateAsks(book.Asks) + b.lastUpdateTime = time.Now() +} + +func (b *RBTOrderBook) Copy() OrderBook { + var book = NewRBOrderBook(b.Symbol) + book.Asks = b.Asks.CopyInorder(0) + book.Bids = b.Bids.CopyInorder(0) + return book +} + +func (b *RBTOrderBook) CopyDepth(limit int) OrderBook { + var book = NewRBOrderBook(b.Symbol) + book.Asks = b.Asks.CopyInorder(limit) + book.Bids = b.Bids.CopyInorderReverse(limit) + return book +} + +func (b *RBTOrderBook) convertTreeToPriceVolumeSlice(tree *RBTree, limit int, descending bool) (pvs PriceVolumeSlice) { + if descending { + tree.InorderReverse(func(n *RBNode) bool { + pvs = append(pvs, PriceVolume{ + Price: n.key, + Volume: n.value, + }) + + return !(limit > 0 && len(pvs) >= limit) + }) + + return pvs + } + + tree.Inorder(func(n *RBNode) bool { + pvs = append(pvs, PriceVolume{ + Price: n.key, + Volume: n.value, + }) + + return !(limit > 0 && len(pvs) >= limit) + }) + return pvs +} + +func (b *RBTOrderBook) SideBook(sideType SideType) PriceVolumeSlice { + switch sideType { + + case SideTypeBuy: + return b.convertTreeToPriceVolumeSlice(b.Bids, 0, true) + + case SideTypeSell: + return b.convertTreeToPriceVolumeSlice(b.Asks, 0, false) + + default: + return nil + } +} + +func (b *RBTOrderBook) Print() { + b.Asks.Inorder(func(n *RBNode) bool { + fmt.Printf("ask: %s x %s", n.key.String(), n.value.String()) + return true 
+ }) + + b.Bids.InorderReverse(func(n *RBNode) bool { + fmt.Printf("bid: %s x %s", n.key.String(), n.value.String()) + return true + }) +} diff --git a/pkg/types/rbtorderbook_callbacks.go b/pkg/types/rbtorderbook_callbacks.go new file mode 100644 index 0000000000..6f26f44b76 --- /dev/null +++ b/pkg/types/rbtorderbook_callbacks.go @@ -0,0 +1,25 @@ +// Code generated by "callbackgen -type RBTOrderBook"; DO NOT EDIT. + +package types + +import () + +func (b *RBTOrderBook) OnLoad(cb func(book *RBTOrderBook)) { + b.loadCallbacks = append(b.loadCallbacks, cb) +} + +func (b *RBTOrderBook) EmitLoad(book *RBTOrderBook) { + for _, cb := range b.loadCallbacks { + cb(book) + } +} + +func (b *RBTOrderBook) OnUpdate(cb func(book *RBTOrderBook)) { + b.updateCallbacks = append(b.updateCallbacks, cb) +} + +func (b *RBTOrderBook) EmitUpdate(book *RBTOrderBook) { + for _, cb := range b.updateCallbacks { + cb(book) + } +} diff --git a/pkg/types/rbtorderbook_test.go b/pkg/types/rbtorderbook_test.go new file mode 100644 index 0000000000..1c8be0bec6 --- /dev/null +++ b/pkg/types/rbtorderbook_test.go @@ -0,0 +1,78 @@ +package types + +import ( + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/stretchr/testify/assert" +) + +func TestRBOrderBook_EmptyBook(t *testing.T) { + book := NewRBOrderBook("BTCUSDT") + bid, ok := book.BestBid() + assert.False(t, ok) + assert.Equal(t, fixedpoint.Zero, bid.Price) + + ask, ok := book.BestAsk() + assert.False(t, ok) + assert.Equal(t, fixedpoint.Zero, ask.Price) +} + +func TestRBOrderBook_Load(t *testing.T) { + book := NewRBOrderBook("BTCUSDT") + + book.Load(SliceOrderBook{ + Symbol: "BTCUSDT", + Bids: PriceVolumeSlice{ + {Price: fixedpoint.NewFromFloat(2800.0), Volume: fixedpoint.One}, + }, + Asks: PriceVolumeSlice{ + {Price: fixedpoint.NewFromFloat(2810.0), Volume: fixedpoint.One}, + }, + }) + + bid, ok := book.BestBid() + assert.True(t, ok) + assert.Equal(t, fixedpoint.NewFromFloat(2800.0), bid.Price) + + ask, ok := book.BestAsk() + assert.True(t, ok) + assert.Equal(t, fixedpoint.NewFromFloat(2810.0), ask.Price) +} + +func TestRBOrderBook_LoadAndDelete(t *testing.T) { + book := NewRBOrderBook("BTCUSDT") + + book.Load(SliceOrderBook{ + Symbol: "BTCUSDT", + Bids: PriceVolumeSlice{ + {Price: fixedpoint.NewFromFloat(2800.0), Volume: fixedpoint.One}, + }, + Asks: PriceVolumeSlice{ + {Price: fixedpoint.NewFromFloat(2810.0), Volume: fixedpoint.One}, + }, + }) + + bid, ok := book.BestBid() + assert.True(t, ok) + assert.Equal(t, fixedpoint.NewFromFloat(2800.0), bid.Price) + + ask, ok := book.BestAsk() + assert.True(t, ok) + assert.Equal(t, fixedpoint.NewFromFloat(2810.0), ask.Price) + + book.Load(SliceOrderBook{ + Symbol: "BTCUSDT", + Bids: PriceVolumeSlice{ + {Price: fixedpoint.NewFromFloat(2800.0), Volume: fixedpoint.Zero}, + }, + Asks: PriceVolumeSlice{ + {Price: fixedpoint.NewFromFloat(2810.0), Volume: fixedpoint.Zero}, + }, + }) + + bid, ok = book.BestBid() + assert.False(t, ok) + ask, ok = book.BestAsk() + assert.False(t, ok) +} diff --git a/pkg/types/rbtree.go b/pkg/types/rbtree.go new file mode 100644 index 0000000000..fc1c84b2af --- /dev/null +++ b/pkg/types/rbtree.go @@ -0,0 +1,474 @@ +package types + +import ( + "fmt" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type RBTree struct { + Root *RBNode + size int +} + +var neel = &RBNode{color: Black} + +func NewRBTree() *RBTree { + var root = neel + root.parent = neel + return &RBTree{ + Root: root, + } +} + +func (tree *RBTree) Delete(key fixedpoint.Value) bool { + var deleting = tree.Search(key) + if deleting 
== nil { + return false + } + + // y = the node to be deleted + // x (the child of the deleted node) + var x, y *RBNode + // fmt.Printf("neel = %p %+v\n", neel, neel) + // fmt.Printf("deleting = %+v\n", deleting) + + // the deleting node has only one child, it's easy, + // we just connect the child the parent of the deleting node + if deleting.left == neel || deleting.right == neel { + y = deleting + // fmt.Printf("y = deleting = %+v\n", y) + } else { + // if both children are not NIL (neel), we need to find the successor + // and copy the successor to the memory location of the deleting node. + // since it's successor, it always has no child connecting to it. + y = tree.Successor(deleting) + // fmt.Printf("y = successor = %+v\n", y) + } + + // y.left or y.right could be neel + if y.left != neel { + x = y.left + } else { + x = y.right + } + + // fmt.Printf("x = %+v\n", y) + x.parent = y.parent + + if y.parent == neel { + tree.Root = x + } else if y == y.parent.left { + y.parent.left = x + } else { + y.parent.right = x + } + + // copy the data from the successor to the memory location of the deleting node + if y != deleting { + deleting.key = y.key + deleting.value = y.value + } + + if y.color == Black { + tree.DeleteFixup(x) + } + + tree.size-- + + return true +} + +func (tree *RBTree) DeleteFixup(current *RBNode) { + for current != tree.Root && current.color == Black { + if current == current.parent.left { + sibling := current.parent.right + if sibling.color == Red { + sibling.color = Black + current.parent.color = Red + tree.RotateLeft(current.parent) + sibling = current.parent.right + } + + // if both are black nodes + if sibling.left.color == Black && sibling.right.color == Black { + sibling.color = Red + current = current.parent + } else { + // only one of the child is black + if sibling.right.color == Black { + sibling.left.color = Black + sibling.color = Red + tree.RotateRight(sibling) + sibling = current.parent.right + } + + sibling.color = current.parent.color + current.parent.color = Black + sibling.right.color = Black + tree.RotateLeft(current.parent) + current = tree.Root + } + } else { // if current is right child + sibling := current.parent.left + if sibling.color == Red { + sibling.color = Black + current.parent.color = Red + tree.RotateRight(current.parent) + sibling = current.parent.left + } + + if sibling.left.color == Black && sibling.right.color == Black { + sibling.color = Red + current = current.parent + } else { // if only one of child is Black + + // the left child of sibling is black, and right child is red + if sibling.left.color == Black { + sibling.right.color = Black + sibling.color = Red + tree.RotateLeft(sibling) + sibling = current.parent.left + } + + sibling.color = current.parent.color + current.parent.color = Black + sibling.left.color = Black + tree.RotateRight(current.parent) + current = tree.Root + } + } + } + + current.color = Black +} + +func (tree *RBTree) Upsert(key, val fixedpoint.Value) { + var y = neel + var x = tree.Root + var node = &RBNode{ + key: key, + value: val, + color: Red, + left: neel, + right: neel, + parent: neel, + } + + for x != neel { + y = x + + if node.key == x.key { + // found node, skip insert and fix + x.value = val + return + } else if node.key.Compare(x.key) < 0 { + x = x.left + } else { + x = x.right + } + } + + node.parent = y + + if y == neel { + tree.Root = node + } else if node.key.Compare(y.key) < 0 { + y.left = node + } else { + y.right = node + } + + tree.InsertFixup(node) +} + +func (tree *RBTree) Insert(key, val 
fixedpoint.Value) { + var y = neel + var x = tree.Root + var node = &RBNode{ + key: key, + value: val, + color: Red, + left: neel, + right: neel, + parent: neel, + } + + for x != neel { + y = x + + if node.key.Compare(x.key) < 0 { + x = x.left + } else { + x = x.right + } + } + + node.parent = y + + if y == neel { + tree.Root = node + } else if node.key.Compare(y.key) < 0 { + y.left = node + } else { + y.right = node + } + + tree.size++ + tree.InsertFixup(node) +} + +func (tree *RBTree) Search(key fixedpoint.Value) *RBNode { + var current = tree.Root + for current != neel && key != current.key { + if key.Compare(current.key) < 0 { + current = current.left + } else { + current = current.right + } + } + + if current == neel { + return nil + } + + return current +} + +func (tree *RBTree) Size() int { + return tree.size +} + +func (tree *RBTree) InsertFixup(current *RBNode) { + // A red node can't have a red parent, we need to fix it up + for current.parent.color == Red { + if current.parent == current.parent.parent.left { + uncle := current.parent.parent.right + if uncle.color == Red { + current.parent.color = Black + uncle.color = Black + current.parent.parent.color = Red + current = current.parent.parent + } else { // if uncle is black + if current == current.parent.right { + current = current.parent + tree.RotateLeft(current) + } + + current.parent.color = Black + current.parent.parent.color = Red + tree.RotateRight(current.parent.parent) + } + } else { + uncle := current.parent.parent.left + if uncle.color == Red { + current.parent.color = Black + uncle.color = Black + current.parent.parent.color = Red + current = current.parent.parent + } else { + if current == current.parent.left { + current = current.parent + tree.RotateRight(current) + } + + current.parent.color = Black + current.parent.parent.color = Red + tree.RotateLeft(current.parent.parent) + } + } + } + + // ensure that root is black + tree.Root.color = Black +} + +// RotateLeft +// x is the axes of rotation, y is the node that will be replace x's position. +// we need to: +// 1. move y's left child to the x's right child +// 2. change y's parent to x's parent +// 3. 
change x's parent to y +func (tree *RBTree) RotateLeft(x *RBNode) { + var y = x.right + x.right = y.left + + if y.left != neel { + y.left.parent = x + } + + y.parent = x.parent + + if x.parent == neel { + tree.Root = y + } else if x == x.parent.left { + x.parent.left = y + } else { + x.parent.right = y + } + + y.left = x + x.parent = y +} + +func (tree *RBTree) RotateRight(y *RBNode) { + x := y.left + y.left = x.right + + if x.right != neel { + x.right.parent = y + } + + x.parent = y.parent + + if y.parent == neel { + tree.Root = x + } else if y == y.parent.left { + y.parent.left = x + } else { + y.parent.right = x + } + + x.right = y + y.parent = x +} + +func (tree *RBTree) Rightmost() *RBNode { + return tree.RightmostOf(tree.Root) +} + +func (tree *RBTree) RightmostOf(current *RBNode) *RBNode { + if current == neel || current == nil { + return nil + } + + for current.right != neel { + current = current.right + } + + return current +} + +func (tree *RBTree) Leftmost() *RBNode { + return tree.LeftmostOf(tree.Root) +} + +func (tree *RBTree) LeftmostOf(current *RBNode) *RBNode { + if current == neel || current == nil { + return nil + } + + for current.left != neel { + current = current.left + } + + return current +} + +func (tree *RBTree) Successor(current *RBNode) *RBNode { + if current.right != neel { + return tree.LeftmostOf(current.right) + } + + var newNode = current.parent + for newNode != neel && current == newNode.right { + current = newNode + newNode = newNode.parent + } + + return newNode +} + +func (tree *RBTree) Preorder(cb func(n *RBNode)) { + tree.PreorderOf(tree.Root, cb) +} + +func (tree *RBTree) PreorderOf(current *RBNode, cb func(n *RBNode)) { + if current != neel && current != nil { + cb(current) + tree.PreorderOf(current.left, cb) + tree.PreorderOf(current.right, cb) + } +} + +// Inorder traverses the tree in ascending order +func (tree *RBTree) Inorder(cb func(n *RBNode) bool) { + tree.InorderOf(tree.Root, cb) +} + +func (tree *RBTree) InorderOf(current *RBNode, cb func(n *RBNode) bool) { + if current != neel && current != nil { + tree.InorderOf(current.left, cb) + if !cb(current) { + return + } + tree.InorderOf(current.right, cb) + } +} + +// InorderReverse traverses the tree in descending order +func (tree *RBTree) InorderReverse(cb func(n *RBNode) bool) { + tree.InorderReverseOf(tree.Root, cb) +} + +func (tree *RBTree) InorderReverseOf(current *RBNode, cb func(n *RBNode) bool) { + if current != neel && current != nil { + tree.InorderReverseOf(current.right, cb) + if !cb(current) { + return + } + tree.InorderReverseOf(current.left, cb) + } +} + +func (tree *RBTree) Postorder(cb func(n *RBNode) bool) { + tree.PostorderOf(tree.Root, cb) +} + +func (tree *RBTree) PostorderOf(current *RBNode, cb func(n *RBNode) bool) { + if current != neel && current != nil { + tree.PostorderOf(current.left, cb) + tree.PostorderOf(current.right, cb) + if !cb(current) { + return + } + } +} + +func (tree *RBTree) CopyInorderReverse(limit int) *RBTree { + cnt := 0 + newTree := NewRBTree() + tree.InorderReverse(func(n *RBNode) bool { + if cnt >= limit { + return false + } + + newTree.Insert(n.key, n.value) + cnt++ + return true + }) + return newTree +} + +func (tree *RBTree) CopyInorder(limit int) *RBTree { + cnt := 0 + newTree := NewRBTree() + tree.Inorder(func(n *RBNode) bool { + if limit > 0 && cnt >= limit { + return false + } + + newTree.Insert(n.key, n.value) + cnt++ + return true + }) + + return newTree +} + +func (tree *RBTree) Print() { + tree.Inorder(func(n *RBNode) bool { + 
fmt.Printf("%v -> %v\n", n.key, n.value) + return true + }) +} diff --git a/pkg/types/rbtree_node.go b/pkg/types/rbtree_node.go new file mode 100644 index 0000000000..b387afe56a --- /dev/null +++ b/pkg/types/rbtree_node.go @@ -0,0 +1,22 @@ +package types + +import "github.com/c9s/bbgo/pkg/fixedpoint" + +// Color is the RB Tree color +type Color bool + +const ( + Red = Color(false) + Black = Color(true) +) + +/* +RBNode +A red node always has black children. +A black node may have red or black children +*/ +type RBNode struct { + left, right, parent *RBNode + key, value fixedpoint.Value + color Color +} diff --git a/pkg/types/rbtree_test.go b/pkg/types/rbtree_test.go new file mode 100644 index 0000000000..0daddf1997 --- /dev/null +++ b/pkg/types/rbtree_test.go @@ -0,0 +1,221 @@ +package types + +import ( + "math/rand" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +var itov func(int64) fixedpoint.Value = fixedpoint.NewFromInt + +func TestRBTree_InsertAndDelete(t *testing.T) { + tree := NewRBTree() + node := tree.Rightmost() + assert.Nil(t, node) + + tree.Insert(itov(10), itov(10)) + tree.Insert(itov(9), itov(9)) + tree.Insert(itov(12), itov(12)) + tree.Insert(itov(11), itov(11)) + tree.Insert(itov(13), itov(13)) + + node = tree.Rightmost() + assert.Equal(t, itov(13), node.key) + assert.Equal(t, itov(13), node.value) + + ok := tree.Delete(fixedpoint.NewFromInt(12)) + assert.True(t, ok, "should delete the node successfully") +} + +func TestRBTree_Rightmost(t *testing.T) { + tree := NewRBTree() + node := tree.Rightmost() + assert.Nil(t, node, "should be nil") + + tree.Insert(itov(10), itov(10)) + node = tree.Rightmost() + assert.Equal(t, itov(10), node.key) + assert.Equal(t, itov(10), node.value) + + tree.Insert(itov(12), itov(12)) + tree.Insert(itov(9), itov(9)) + node = tree.Rightmost() + assert.Equal(t, itov(12), node.key) +} + +func TestRBTree_RandomInsertSearchAndDelete(t *testing.T) { + var keys []fixedpoint.Value + + tree := NewRBTree() + for i := 1; i < 100; i++ { + v := fixedpoint.NewFromFloat(rand.Float64()*100 + 1.0) + keys = append(keys, v) + tree.Insert(v, v) + } + + for _, key := range keys { + node := tree.Search(key) + assert.NotNil(t, node) + + ok := tree.Delete(key) + assert.True(t, ok, "should find and delete the node") + } +} + +func TestRBTree_CopyInorder(t *testing.T) { + tree := NewRBTree() + for i := 1.0; i < 10.0; i += 1.0 { + tree.Insert(fixedpoint.NewFromFloat(i*100.0), fixedpoint.NewFromFloat(i)) + } + + newTree := tree.CopyInorder(3) + assert.Equal(t, 3, newTree.Size()) + + newTree.Print() + + node1 := newTree.Search(fixedpoint.NewFromFloat(100.0)) + assert.NotNil(t, node1) + + node2 := newTree.Search(fixedpoint.NewFromFloat(200.0)) + assert.NotNil(t, node2) + + node3 := newTree.Search(fixedpoint.NewFromFloat(300.0)) + assert.NotNil(t, node3) + + node4 := newTree.Search(fixedpoint.NewFromFloat(400.0)) + assert.Nil(t, node4) +} + +func TestTree_Copy(t *testing.T) { + tree := NewRBTree() + tree.Insert(fixedpoint.NewFromFloat(3000.0), fixedpoint.NewFromFloat(1.0)) + assert.NotNil(t, tree.Root) + + tree.Insert(fixedpoint.NewFromFloat(4000.0), fixedpoint.NewFromFloat(2.0)) + tree.Insert(fixedpoint.NewFromFloat(2000.0), fixedpoint.NewFromFloat(3.0)) + + newTree := tree.CopyInorder(0) + node1 := newTree.Search(fixedpoint.NewFromFloat(2000.0)) + assert.NotNil(t, node1) + assert.Equal(t, fixedpoint.NewFromFloat(2000.0), node1.key) + assert.Equal(t, fixedpoint.NewFromFloat(3.0), node1.value) + + node2 := 
newTree.Search(fixedpoint.NewFromFloat(3000.0)) + assert.NotNil(t, node2) + assert.Equal(t, fixedpoint.NewFromFloat(3000.0), node2.key) + assert.Equal(t, fixedpoint.NewFromFloat(1.0), node2.value) + + node3 := newTree.Search(fixedpoint.NewFromFloat(4000.0)) + assert.NotNil(t, node3) + assert.Equal(t, fixedpoint.NewFromFloat(4000.0), node3.key) + assert.Equal(t, fixedpoint.NewFromFloat(2.0), node3.value) +} + +func TestRBTree_basic(t *testing.T) { + tree := NewRBTree() + tree.Insert(fixedpoint.NewFromFloat(3000.0), fixedpoint.NewFromFloat(10.0)) + assert.NotNil(t, tree.Root) + + tree.Insert(fixedpoint.NewFromFloat(4000.0), fixedpoint.NewFromFloat(10.0)) + tree.Insert(fixedpoint.NewFromFloat(2000.0), fixedpoint.NewFromFloat(10.0)) + + // root is always black + assert.Equal(t, fixedpoint.NewFromFloat(3000.0), tree.Root.key) + assert.Equal(t, Black, tree.Root.color) + + assert.Equal(t, fixedpoint.NewFromFloat(2000.0), tree.Root.left.key) + assert.Equal(t, Red, tree.Root.left.color) + + assert.Equal(t, fixedpoint.NewFromFloat(4000.0), tree.Root.right.key) + assert.Equal(t, Red, tree.Root.right.color) + + // should rotate + tree.Insert(fixedpoint.NewFromFloat(1500.0), fixedpoint.NewFromFloat(10.0)) + tree.Insert(fixedpoint.NewFromFloat(1000.0), fixedpoint.NewFromFloat(10.0)) + + deleted := tree.Delete(fixedpoint.NewFromFloat(1000.0)) + assert.True(t, deleted) + + deleted = tree.Delete(fixedpoint.NewFromFloat(1500.0)) + assert.True(t, deleted) + +} + +func TestRBTree_bulkInsert(t *testing.T) { + var pvs = map[fixedpoint.Value]fixedpoint.Value{} + var tree = NewRBTree() + for i := 0; i < 1000000; i++ { + price := fixedpoint.NewFromFloat(rand.Float64()) + volume := fixedpoint.NewFromFloat(rand.Float64()) + tree.Upsert(price, volume) + pvs[price] = volume + } + tree.Inorder(func(n *RBNode) bool { + if n.left != neel { + if !assert.True(t, n.key.Compare(n.left.key) > 0) { + return false + } + } + if n.right != neel { + if !assert.True(t, n.key.Compare(n.right.key) < 0) { + return false + } + } + return true + }) +} + +func TestRBTree_bulkInsertAndDelete(t *testing.T) { + var pvs = map[fixedpoint.Value]fixedpoint.Value{} + + var getRandomPrice = func() fixedpoint.Value { + for p := range pvs { + return p + } + return fixedpoint.Zero + } + + var tree = NewRBTree() + for i := 0; i < 1000000; i++ { + price := fixedpoint.NewFromFloat(rand.Float64()) + volume := fixedpoint.NewFromFloat(rand.Float64()) + tree.Upsert(price, volume) + pvs[price] = volume + + if i%3 == 0 || i%7 == 0 { + removePrice := getRandomPrice() + if removePrice.Sign() > 0 { + if !assert.True(t, tree.Delete(removePrice), "existing price %f should be removed at round %d", removePrice.Float64(), i) { + return + } + delete(pvs, removePrice) + } + } + } + + // all prices should be found + for p := range pvs { + node := tree.Search(p) + if !assert.NotNil(t, node, "should found price %f", p.Float64()) { + return + } + } + + // validate tree structure + tree.Inorder(func(n *RBNode) bool { + if n.left != neel { + if !assert.True(t, n.key.Compare(n.left.key) > 0) { + return false + } + } + if n.right != neel { + if !assert.True(t, n.key.Compare(n.right.key) < 0) { + return false + } + } + return true + }) +} diff --git a/pkg/types/reward.go b/pkg/types/reward.go new file mode 100644 index 0000000000..307486d152 --- /dev/null +++ b/pkg/types/reward.go @@ -0,0 +1,60 @@ +package types + +import ( + "fmt" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type RewardType string + +const ( + RewardAirdrop = RewardType("airdrop") + 
RewardCommission = RewardType("commission") + RewardReferralKickback = RewardType("referral_kickback") + RewardHolding = RewardType("holding") + RewardMining = RewardType("mining") + RewardTrading = RewardType("trading") + RewardVipRebate = RewardType("vip_rebate") +) + +type Reward struct { + GID int64 `json:"gid" db:"gid"` + UUID string `json:"uuid" db:"uuid"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + Type RewardType `json:"reward_type" db:"reward_type"` + Currency string `json:"currency" db:"currency"` + Quantity fixedpoint.Value `json:"quantity" db:"quantity"` + State string `json:"state" db:"state"` + Note string `json:"note" db:"note"` + Spent bool `json:"spent" db:"spent"` + CreatedAt Time `json:"created_at" db:"created_at"` +} + +func (r Reward) String() (s string) { + s = fmt.Sprintf("reward %s %s %20s %20f %5s @ %s", r.Exchange, r.UUID, r.Type, r.Quantity.Float64(), r.Currency, r.CreatedAt.String()) + + if r.Note != "" { + s += ": " + r.Note + } + + return s +} + +type RewardSlice []Reward + +func (s RewardSlice) Len() int { return len(s) } +func (s RewardSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +type RewardSliceByCreationTime RewardSlice + +func (s RewardSliceByCreationTime) Len() int { return len(s) } +func (s RewardSliceByCreationTime) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +// Less reports whether x[i] should be ordered before x[j] +func (s RewardSliceByCreationTime) Less(i, j int) bool { + return time.Time(s[i].CreatedAt).Before( + time.Time(s[j].CreatedAt), + ) +} diff --git a/pkg/types/side.go b/pkg/types/side.go index a24a6d508b..46c916aed2 100644 --- a/pkg/types/side.go +++ b/pkg/types/side.go @@ -1,5 +1,12 @@ package types +import ( + "encoding/json" + "strings" + + "github.com/pkg/errors" +) + // SideType define side type of order type SideType string @@ -7,8 +14,48 @@ const ( SideTypeBuy = SideType("BUY") SideTypeSell = SideType("SELL") SideTypeSelf = SideType("SELF") + + // SideTypeBoth is only used for the configuration context + SideTypeBoth = SideType("BOTH") ) +var ErrInvalidSideType = errors.New("invalid side type") + +func StrToSideType(s string) (side SideType, err error) { + switch strings.ToLower(s) { + case "buy": + side = SideTypeBuy + + case "sell": + side = SideTypeSell + + case "both": + side = SideTypeBoth + + default: + err = ErrInvalidSideType + return side, err + + } + + return side, err +} + +func (side *SideType) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + + ss, err := StrToSideType(s) + if err != nil { + return err + } + + *side = ss + return nil +} + func (side SideType) Reverse() SideType { switch side { case SideTypeBuy: @@ -21,16 +68,20 @@ func (side SideType) Reverse() SideType { return side } +func (side SideType) String() string { + return string(side) +} + func (side SideType) Color() string { if side == SideTypeBuy { - return Green + return GreenColor } if side == SideTypeSell { - return Red + return RedColor } - return "#f0f0f0" + return GrayColor } func SideToColorName(side SideType) string { diff --git a/pkg/types/sliceorderbook.go b/pkg/types/sliceorderbook.go new file mode 100644 index 0000000000..777e30333b --- /dev/null +++ b/pkg/types/sliceorderbook.go @@ -0,0 +1,201 @@ +package types + +import ( + "fmt" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +// SliceOrderBook is a general order book structure which could be used +// for RESTful responses and websocket stream parsing 
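+// Note that SliceOrderBook itself holds no lock; callers sharing one instance across goroutines need their own synchronization.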
+//go:generate callbackgen -type SliceOrderBook +type SliceOrderBook struct { + Symbol string + Bids PriceVolumeSlice + Asks PriceVolumeSlice + + lastUpdateTime time.Time + + loadCallbacks []func(book *SliceOrderBook) + updateCallbacks []func(book *SliceOrderBook) +} + +func NewSliceOrderBook(symbol string) *SliceOrderBook { + return &SliceOrderBook{ + Symbol: symbol, + } +} + +func (b *SliceOrderBook) LastUpdateTime() time.Time { + return b.lastUpdateTime +} + +func (b *SliceOrderBook) Spread() (fixedpoint.Value, bool) { + bestBid, ok := b.BestBid() + if !ok { + return fixedpoint.Zero, false + } + + bestAsk, ok := b.BestAsk() + if !ok { + return fixedpoint.Zero, false + } + + return bestAsk.Price.Sub(bestBid.Price), true +} + +func (b *SliceOrderBook) BestBid() (PriceVolume, bool) { + if len(b.Bids) == 0 { + return PriceVolume{}, false + } + + return b.Bids[0], true +} + +func (b *SliceOrderBook) BestAsk() (PriceVolume, bool) { + if len(b.Asks) == 0 { + return PriceVolume{}, false + } + + return b.Asks[0], true +} + +func (b *SliceOrderBook) SideBook(sideType SideType) PriceVolumeSlice { + switch sideType { + + case SideTypeBuy: + return b.Bids + + case SideTypeSell: + return b.Asks + + default: + return nil + } +} + +func (b *SliceOrderBook) IsValid() (bool, error) { + bid, hasBid := b.BestBid() + ask, hasAsk := b.BestAsk() + + if !hasBid { + return false, errors.New("empty bids") + } + + if !hasAsk { + return false, errors.New("empty asks") + } + + if bid.Price.Compare(ask.Price) > 0 { + return false, fmt.Errorf("bid price %s > ask price %s", bid.Price.String(), ask.Price.String()) + } + + return true, nil +} + +func (b *SliceOrderBook) PriceVolumesBySide(side SideType) PriceVolumeSlice { + switch side { + + case SideTypeBuy: + return b.Bids.Copy() + + case SideTypeSell: + return b.Asks.Copy() + } + + return nil +} + +func (b *SliceOrderBook) updateAsks(pvs PriceVolumeSlice) { + for _, pv := range pvs { + if pv.Volume.IsZero() { + b.Asks = b.Asks.Remove(pv.Price, false) + } else { + b.Asks = b.Asks.Upsert(pv, false) + } + } +} + +func (b *SliceOrderBook) updateBids(pvs PriceVolumeSlice) { + for _, pv := range pvs { + if pv.Volume.IsZero() { + b.Bids = b.Bids.Remove(pv.Price, true) + } else { + b.Bids = b.Bids.Upsert(pv, true) + } + } +} + +func (b *SliceOrderBook) update(book SliceOrderBook) { + b.updateBids(book.Bids) + b.updateAsks(book.Asks) + b.lastUpdateTime = time.Now() +} + +func (b *SliceOrderBook) Reset() { + b.Bids = nil + b.Asks = nil +} + +func (b *SliceOrderBook) Load(book SliceOrderBook) { + b.Reset() + b.update(book) + b.EmitLoad(b) +} + +func (b *SliceOrderBook) Update(book SliceOrderBook) { + b.update(book) + b.EmitUpdate(b) +} + +func (b *SliceOrderBook) Print() { + fmt.Print(b.String()) +} + +func (b *SliceOrderBook) String() string { + sb := strings.Builder{} + + sb.WriteString("BOOK ") + sb.WriteString(b.Symbol) + sb.WriteString("\n") + + if len(b.Asks) > 0 { + sb.WriteString("ASKS:\n") + for i := len(b.Asks) - 1; i >= 0; i-- { + sb.WriteString("- ASK: ") + sb.WriteString(b.Asks[i].String()) + sb.WriteString("\n") + } + } + + if len(b.Bids) > 0 { + sb.WriteString("BIDS:\n") + for _, bid := range b.Bids { + sb.WriteString("- BID: ") + sb.WriteString(bid.String()) + sb.WriteString("\n") + } + } + + return sb.String() +} + +func (b *SliceOrderBook) CopyDepth(limit int) OrderBook { + var book SliceOrderBook + book.Symbol = b.Symbol + book.Bids = b.Bids.CopyDepth(limit) + book.Asks = b.Asks.CopyDepth(limit) + return &book +} + +func (b *SliceOrderBook) Copy() OrderBook { 
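+	// copy the symbol and both sides so the caller can modify the returned book freely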
+ var book SliceOrderBook + book.Symbol = b.Symbol + book.Bids = b.Bids.Copy() + book.Asks = b.Asks.Copy() + return &book +} diff --git a/pkg/types/sliceorderbook_callbacks.go b/pkg/types/sliceorderbook_callbacks.go new file mode 100644 index 0000000000..43aef7617f --- /dev/null +++ b/pkg/types/sliceorderbook_callbacks.go @@ -0,0 +1,25 @@ +// Code generated by "callbackgen -type SliceOrderBook"; DO NOT EDIT. + +package types + +import () + +func (b *SliceOrderBook) OnLoad(cb func(book *SliceOrderBook)) { + b.loadCallbacks = append(b.loadCallbacks, cb) +} + +func (b *SliceOrderBook) EmitLoad(book *SliceOrderBook) { + for _, cb := range b.loadCallbacks { + cb(book) + } +} + +func (b *SliceOrderBook) OnUpdate(cb func(book *SliceOrderBook)) { + b.updateCallbacks = append(b.updateCallbacks, cb) +} + +func (b *SliceOrderBook) EmitUpdate(book *SliceOrderBook) { + for _, cb := range b.updateCallbacks { + cb(book) + } +} diff --git a/pkg/types/sort.go b/pkg/types/sort.go new file mode 100644 index 0000000000..6893d52e39 --- /dev/null +++ b/pkg/types/sort.go @@ -0,0 +1,28 @@ +package types + +import ( + "sort" + "time" +) + +func SortTradesAscending(trades []Trade) []Trade { + sort.Slice(trades, func(i, j int) bool { + return trades[i].Time.Before(time.Time(trades[j].Time)) + }) + return trades +} + +func SortOrdersAscending(orders []Order) []Order { + sort.Slice(orders, func(i, j int) bool { + return orders[i].CreationTime.Time().Before(orders[j].CreationTime.Time()) + }) + return orders +} + +func SortKLinesAscending(klines []KLine) []KLine { + sort.Slice(klines, func(i, j int) bool { + return klines[i].StartTime.Unix() < klines[j].StartTime.Unix() + }) + + return klines +} diff --git a/pkg/types/sort_test.go b/pkg/types/sort_test.go new file mode 100644 index 0000000000..4e5171acea --- /dev/null +++ b/pkg/types/sort_test.go @@ -0,0 +1,31 @@ +package types + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestSortTradesAscending(t *testing.T) { + var trades = []Trade{ + { + ID: 1, + Symbol: "BTCUSDT", + Side: SideTypeBuy, + IsBuyer: false, + IsMaker: false, + Time: Time(time.Unix(2000, 0)), + }, + { + ID: 2, + Symbol: "BTCUSDT", + Side: SideTypeBuy, + IsBuyer: false, + IsMaker: false, + Time: Time(time.Unix(1000, 0)), + }, + } + trades = SortTradesAscending(trades) + assert.True(t, trades[0].Time.Before(trades[1].Time.Time())) +} diff --git a/pkg/types/standardstream_callbacks.go b/pkg/types/standardstream_callbacks.go index db33b748b6..19fd476907 100644 --- a/pkg/types/standardstream_callbacks.go +++ b/pkg/types/standardstream_callbacks.go @@ -4,99 +4,163 @@ package types import () -func (stream *StandardStream) OnConnect(cb func()) { - stream.connectCallbacks = append(stream.connectCallbacks, cb) +func (s *StandardStream) OnStart(cb func()) { + s.startCallbacks = append(s.startCallbacks, cb) } -func (stream *StandardStream) EmitConnect() { - for _, cb := range stream.connectCallbacks { +func (s *StandardStream) EmitStart() { + for _, cb := range s.startCallbacks { cb() } } -func (stream *StandardStream) OnTradeUpdate(cb func(trade Trade)) { - stream.tradeUpdateCallbacks = append(stream.tradeUpdateCallbacks, cb) +func (s *StandardStream) OnConnect(cb func()) { + s.connectCallbacks = append(s.connectCallbacks, cb) } -func (stream *StandardStream) EmitTradeUpdate(trade Trade) { - for _, cb := range stream.tradeUpdateCallbacks { +func (s *StandardStream) EmitConnect() { + for _, cb := range s.connectCallbacks { + cb() + } +} + +func (s *StandardStream) 
OnDisconnect(cb func()) { + s.disconnectCallbacks = append(s.disconnectCallbacks, cb) +} + +func (s *StandardStream) EmitDisconnect() { + for _, cb := range s.disconnectCallbacks { + cb() + } +} + +func (s *StandardStream) OnTradeUpdate(cb func(trade Trade)) { + s.tradeUpdateCallbacks = append(s.tradeUpdateCallbacks, cb) +} + +func (s *StandardStream) EmitTradeUpdate(trade Trade) { + for _, cb := range s.tradeUpdateCallbacks { cb(trade) } } -func (stream *StandardStream) OnOrderUpdate(cb func(order Order)) { - stream.orderUpdateCallbacks = append(stream.orderUpdateCallbacks, cb) +func (s *StandardStream) OnOrderUpdate(cb func(order Order)) { + s.orderUpdateCallbacks = append(s.orderUpdateCallbacks, cb) } -func (stream *StandardStream) EmitOrderUpdate(order Order) { - for _, cb := range stream.orderUpdateCallbacks { +func (s *StandardStream) EmitOrderUpdate(order Order) { + for _, cb := range s.orderUpdateCallbacks { cb(order) } } -func (stream *StandardStream) OnBalanceSnapshot(cb func(balances BalanceMap)) { - stream.balanceSnapshotCallbacks = append(stream.balanceSnapshotCallbacks, cb) +func (s *StandardStream) OnBalanceSnapshot(cb func(balances BalanceMap)) { + s.balanceSnapshotCallbacks = append(s.balanceSnapshotCallbacks, cb) } -func (stream *StandardStream) EmitBalanceSnapshot(balances BalanceMap) { - for _, cb := range stream.balanceSnapshotCallbacks { +func (s *StandardStream) EmitBalanceSnapshot(balances BalanceMap) { + for _, cb := range s.balanceSnapshotCallbacks { cb(balances) } } -func (stream *StandardStream) OnBalanceUpdate(cb func(balances BalanceMap)) { - stream.balanceUpdateCallbacks = append(stream.balanceUpdateCallbacks, cb) +func (s *StandardStream) OnBalanceUpdate(cb func(balances BalanceMap)) { + s.balanceUpdateCallbacks = append(s.balanceUpdateCallbacks, cb) } -func (stream *StandardStream) EmitBalanceUpdate(balances BalanceMap) { - for _, cb := range stream.balanceUpdateCallbacks { +func (s *StandardStream) EmitBalanceUpdate(balances BalanceMap) { + for _, cb := range s.balanceUpdateCallbacks { cb(balances) } } -func (stream *StandardStream) OnKLineClosed(cb func(kline KLine)) { - stream.kLineClosedCallbacks = append(stream.kLineClosedCallbacks, cb) +func (s *StandardStream) OnKLineClosed(cb func(kline KLine)) { + s.kLineClosedCallbacks = append(s.kLineClosedCallbacks, cb) } -func (stream *StandardStream) EmitKLineClosed(kline KLine) { - for _, cb := range stream.kLineClosedCallbacks { +func (s *StandardStream) EmitKLineClosed(kline KLine) { + for _, cb := range s.kLineClosedCallbacks { cb(kline) } } -func (stream *StandardStream) OnKLine(cb func(kline KLine)) { - stream.kLineCallbacks = append(stream.kLineCallbacks, cb) +func (s *StandardStream) OnKLine(cb func(kline KLine)) { + s.kLineCallbacks = append(s.kLineCallbacks, cb) } -func (stream *StandardStream) EmitKLine(kline KLine) { - for _, cb := range stream.kLineCallbacks { +func (s *StandardStream) EmitKLine(kline KLine) { + for _, cb := range s.kLineCallbacks { cb(kline) } } -func (stream *StandardStream) OnBookUpdate(cb func(book OrderBook)) { - stream.bookUpdateCallbacks = append(stream.bookUpdateCallbacks, cb) +func (s *StandardStream) OnBookUpdate(cb func(book SliceOrderBook)) { + s.bookUpdateCallbacks = append(s.bookUpdateCallbacks, cb) } -func (stream *StandardStream) EmitBookUpdate(book OrderBook) { - for _, cb := range stream.bookUpdateCallbacks { +func (s *StandardStream) EmitBookUpdate(book SliceOrderBook) { + for _, cb := range s.bookUpdateCallbacks { cb(book) } } -func (stream *StandardStream) 
OnBookSnapshot(cb func(book OrderBook)) { - stream.bookSnapshotCallbacks = append(stream.bookSnapshotCallbacks, cb) +func (s *StandardStream) OnBookTickerUpdate(cb func(bookTicker BookTicker)) { + s.bookTickerUpdateCallbacks = append(s.bookTickerUpdateCallbacks, cb) +} + +func (s *StandardStream) EmitBookTickerUpdate(bookTicker BookTicker) { + for _, cb := range s.bookTickerUpdateCallbacks { + cb(bookTicker) + } +} + +func (s *StandardStream) OnBookSnapshot(cb func(book SliceOrderBook)) { + s.bookSnapshotCallbacks = append(s.bookSnapshotCallbacks, cb) } -func (stream *StandardStream) EmitBookSnapshot(book OrderBook) { - for _, cb := range stream.bookSnapshotCallbacks { +func (s *StandardStream) EmitBookSnapshot(book SliceOrderBook) { + for _, cb := range s.bookSnapshotCallbacks { cb(book) } } +func (s *StandardStream) OnMarketTrade(cb func(trade Trade)) { + s.marketTradeCallbacks = append(s.marketTradeCallbacks, cb) +} + +func (s *StandardStream) EmitMarketTrade(trade Trade) { + for _, cb := range s.marketTradeCallbacks { + cb(trade) + } +} + +func (s *StandardStream) OnFuturesPositionUpdate(cb func(futuresPositions FuturesPositionMap)) { + s.FuturesPositionUpdateCallbacks = append(s.FuturesPositionUpdateCallbacks, cb) +} + +func (s *StandardStream) EmitFuturesPositionUpdate(futuresPositions FuturesPositionMap) { + for _, cb := range s.FuturesPositionUpdateCallbacks { + cb(futuresPositions) + } +} + +func (s *StandardStream) OnFuturesPositionSnapshot(cb func(futuresPositions FuturesPositionMap)) { + s.FuturesPositionSnapshotCallbacks = append(s.FuturesPositionSnapshotCallbacks, cb) +} + +func (s *StandardStream) EmitFuturesPositionSnapshot(futuresPositions FuturesPositionMap) { + for _, cb := range s.FuturesPositionSnapshotCallbacks { + cb(futuresPositions) + } +} + type StandardStreamEventHub interface { + OnStart(cb func()) + OnConnect(cb func()) + OnDisconnect(cb func()) + OnTradeUpdate(cb func(trade Trade)) OnOrderUpdate(cb func(order Order)) @@ -109,7 +173,15 @@ type StandardStreamEventHub interface { OnKLine(cb func(kline KLine)) - OnBookUpdate(cb func(book OrderBook)) + OnBookUpdate(cb func(book SliceOrderBook)) + + OnBookTickerUpdate(cb func(bookTicker BookTicker)) + + OnBookSnapshot(cb func(book SliceOrderBook)) + + OnMarketTrade(cb func(trade Trade)) + + OnFuturesPositionUpdate(cb func(futuresPositions FuturesPositionMap)) - OnBookSnapshot(cb func(book OrderBook)) + OnFuturesPositionSnapshot(cb func(futuresPositions FuturesPositionMap)) } diff --git a/pkg/types/strategy_status.go b/pkg/types/strategy_status.go new file mode 100644 index 0000000000..00efde4841 --- /dev/null +++ b/pkg/types/strategy_status.go @@ -0,0 +1,10 @@ +package types + +// StrategyStatus define strategy status +type StrategyStatus string + +const ( + StrategyStatusRunning StrategyStatus = "RUNNING" + StrategyStatusStopped StrategyStatus = "STOPPED" + StrategyStatusUnknown StrategyStatus = "UNKNOWN" +) diff --git a/pkg/types/stream.go b/pkg/types/stream.go index c204a623a5..05a6b7dc62 100644 --- a/pkg/types/stream.go +++ b/pkg/types/stream.go @@ -2,29 +2,81 @@ package types import ( "context" + "net" + "net/http" + "sync" + "time" + + "github.com/gorilla/websocket" + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" ) +const pingInterval = 30 * time.Second +const readTimeout = 2 * time.Minute +const writeTimeout = 10 * time.Second +const reconnectCoolDownPeriod = 15 * time.Second + +var defaultDialer = &websocket.Dialer{ + Proxy: http.ProxyFromEnvironment, + 
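+	// note: the 4096-byte read buffer assumes the exchange streams send relatively small JSON frames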
HandshakeTimeout: 10 * time.Second, + ReadBufferSize: 4096, +} + type Stream interface { StandardStreamEventHub Subscribe(channel Channel, symbol string, options SubscribeOptions) + GetSubscriptions() []Subscription SetPublicOnly() + GetPublicOnly() bool Connect(ctx context.Context) error Close() error } -type Channel string +type EndpointCreator func(ctx context.Context) (string, error) -var BookChannel = Channel("book") +type Parser func(message []byte) (interface{}, error) -var KLineChannel = Channel("kline") +type Dispatcher func(e interface{}) //go:generate callbackgen -type StandardStream -interface type StandardStream struct { + parser Parser + dispatcher Dispatcher + + endpointCreator EndpointCreator + + // Conn is the websocket connection + Conn *websocket.Conn + + // ConnCtx is the context of the current websocket connection + ConnCtx context.Context + + // ConnCancel is the cancel funcion of the current websocket connection + ConnCancel context.CancelFunc + + // ConnLock is used for locking Conn, ConnCtx and ConnCancel fields. + // When changing these field values, be sure to call ConnLock + ConnLock sync.Mutex + + PublicOnly bool + + // ReconnectC is a signal channel for reconnecting + ReconnectC chan struct{} + + // CloseC is a signal channel for closing stream + CloseC chan struct{} + Subscriptions []Subscription + startCallbacks []func() + connectCallbacks []func() + disconnectCallbacks []func() + // private trade update callbacks tradeUpdateCallbacks []func(trade Trade) @@ -40,35 +92,368 @@ type StandardStream struct { kLineCallbacks []func(kline KLine) - bookUpdateCallbacks []func(book OrderBook) + bookUpdateCallbacks []func(book SliceOrderBook) + + bookTickerUpdateCallbacks []func(bookTicker BookTicker) + + bookSnapshotCallbacks []func(book SliceOrderBook) + + marketTradeCallbacks []func(trade Trade) + + // Futures + FuturesPositionUpdateCallbacks []func(futuresPositions FuturesPositionMap) + + FuturesPositionSnapshotCallbacks []func(futuresPositions FuturesPositionMap) +} + +type StandardStreamEmitter interface { + Stream + EmitStart() + EmitConnect() + EmitDisconnect() + EmitTradeUpdate(Trade) + EmitOrderUpdate(Order) + EmitBalanceSnapshot(BalanceMap) + EmitBalanceUpdate(BalanceMap) + EmitKLineClosed(KLine) + EmitKLine(KLine) + EmitBookUpdate(SliceOrderBook) + EmitBookTickerUpdate(BookTicker) + EmitBookSnapshot(SliceOrderBook) + EmitMarketTrade(Trade) + EmitFuturesPositionUpdate(FuturesPositionMap) + EmitFuturesPositionSnapshot(FuturesPositionMap) +} + +func NewStandardStream() StandardStream { + return StandardStream{ + ReconnectC: make(chan struct{}, 1), + CloseC: make(chan struct{}), + } +} + +func (s *StandardStream) SetPublicOnly() { + s.PublicOnly = true +} + +func (s *StandardStream) GetPublicOnly() bool { + return s.PublicOnly +} + +func (s *StandardStream) SetEndpointCreator(creator EndpointCreator) { + s.endpointCreator = creator +} + +func (s *StandardStream) SetDispatcher(dispatcher Dispatcher) { + s.dispatcher = dispatcher +} + +func (s *StandardStream) SetParser(parser Parser) { + s.parser = parser +} + +func (s *StandardStream) SetConn(ctx context.Context, conn *websocket.Conn) (context.Context, context.CancelFunc) { + // should only start one connection one time, so we lock the mutex + connCtx, connCancel := context.WithCancel(ctx) + s.ConnLock.Lock() + + // ensure the previous context is cancelled + if s.ConnCancel != nil { + s.ConnCancel() + } + + // create a new context for this connection + s.Conn = conn + s.ConnCtx = connCtx + s.ConnCancel = 
connCancel + s.ConnLock.Unlock() + return connCtx, connCancel +} + +func (s *StandardStream) Read(ctx context.Context, conn *websocket.Conn, cancel context.CancelFunc) { + defer func() { + cancel() + s.EmitDisconnect() + }() + + // flag format: debug-{component}-{message type} + debugRawMessage := viper.GetBool("debug-websocket-raw-message") + + for { + select { + + case <-ctx.Done(): + return + + case <-s.CloseC: + return + + default: + if err := conn.SetReadDeadline(time.Now().Add(readTimeout)); err != nil { + log.WithError(err).Errorf("set read deadline error: %s", err.Error()) + } + + mt, message, err := conn.ReadMessage() + if err != nil { + // if it's a network timeout error, we should re-connect + switch err := err.(type) { + + // if it's a websocket related error + case *websocket.CloseError: + if err.Code == websocket.CloseNormalClosure { + return + } + + log.WithError(err).Errorf("websocket error abnormal close: %+v", err) + + _ = conn.Close() + // for unexpected close error, we should re-connect + // emit reconnect to start a new connection + s.Reconnect() + return + + case net.Error: + log.WithError(err).Error("websocket read network error") + _ = conn.Close() + s.Reconnect() + return + + default: + log.WithError(err).Error("unexpected websocket error") + _ = conn.Close() + s.Reconnect() + return + } + } + + // skip non-text messages + if mt != websocket.TextMessage { + continue + } + + if debugRawMessage { + log.Info(string(message)) + } + + var e interface{} + if s.parser != nil { + e, err = s.parser(message) + if err != nil { + log.WithError(err).Errorf("websocket event parse error") + continue + } + } - bookSnapshotCallbacks []func(book OrderBook) + if s.dispatcher != nil { + s.dispatcher(e) + } + } + } +} + +func (s *StandardStream) ping(ctx context.Context, conn *websocket.Conn, cancel context.CancelFunc, interval time.Duration) { + defer func() { + cancel() + log.Debug("[websocket] ping worker stopped") + }() + + var pingTicker = time.NewTicker(interval) + defer pingTicker.Stop() + + for { + select { + + case <-ctx.Done(): + return + + case <-s.CloseC: + return + + case <-pingTicker.C: + log.Debugf("[websocket] -> ping") + if err := conn.WriteControl(websocket.PingMessage, nil, time.Now().Add(writeTimeout)); err != nil { + log.WithError(err).Error("ping error", err) + s.Reconnect() + } + } + } +} + +func (s *StandardStream) GetSubscriptions() []Subscription { + return s.Subscriptions } -func (stream *StandardStream) Subscribe(channel Channel, symbol string, options SubscribeOptions) { - stream.Subscriptions = append(stream.Subscriptions, Subscription{ +func (s *StandardStream) Subscribe(channel Channel, symbol string, options SubscribeOptions) { + s.Subscriptions = append(s.Subscriptions, Subscription{ Channel: channel, Symbol: symbol, Options: options, }) } +func (s *StandardStream) Reconnect() { + select { + case s.ReconnectC <- struct{}{}: + default: + } +} + +// Connect starts the stream and create the websocket connection +func (s *StandardStream) Connect(ctx context.Context) error { + err := s.DialAndConnect(ctx) + if err != nil { + return err + } + + // start one re-connector goroutine with the base context + go s.reconnector(ctx) + + s.EmitStart() + return nil +} + +func (s *StandardStream) reconnector(ctx context.Context) { + for { + select { + + case <-ctx.Done(): + return + + case <-s.CloseC: + return + + case <-s.ReconnectC: + log.Warnf("received reconnect signal, cooling for %s...", reconnectCoolDownPeriod) + time.Sleep(reconnectCoolDownPeriod) + + 
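+			// after the cool-down period, dial again; a failure below re-queues the reconnect signal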
log.Warnf("re-connecting...") + if err := s.DialAndConnect(ctx); err != nil { + log.WithError(err).Errorf("re-connect error, try to reconnect later") + + // re-emit the re-connect signal if error + s.Reconnect() + } + } + } +} + +func (s *StandardStream) DialAndConnect(ctx context.Context) error { + conn, err := s.Dial(ctx) + if err != nil { + return err + } + + connCtx, connCancel := s.SetConn(ctx, conn) + s.EmitConnect() + + go s.Read(connCtx, conn, connCancel) + go s.ping(connCtx, conn, connCancel, pingInterval) + return nil +} + +func (s *StandardStream) Dial(ctx context.Context, args ...string) (*websocket.Conn, error) { + var url string + var err error + if len(args) > 0 { + url = args[0] + } else if s.endpointCreator != nil { + url, err = s.endpointCreator(ctx) + if err != nil { + return nil, errors.Wrap(err, "can not dial, can not create endpoint via the endpoint creator") + } + } else { + return nil, errors.New("can not dial, neither url nor endpoint creator is not defined, you should pass an url to Dial() or call SetEndpointCreator()") + } + + conn, _, err := defaultDialer.Dial(url, nil) + if err != nil { + return nil, err + } + + // use the default ping handler + // The websocket server will send a ping frame every 3 minutes. + // If the websocket server does not receive a pong frame back from the connection within a 10 minutes period, + // the connection will be disconnected. + // Unsolicited pong frames are allowed. + conn.SetPingHandler(nil) + conn.SetPongHandler(func(string) error { + log.Debugf("[websocket] <- received pong") + if err := conn.SetReadDeadline(time.Now().Add(readTimeout * 2)); err != nil { + log.WithError(err).Error("pong handler can not set read deadline") + } + return nil + }) + + log.Infof("[websocket] connected, public = %v, read timeout = %v", s.PublicOnly, readTimeout) + return conn, nil +} + +func (s *StandardStream) Close() error { + log.Debugf("[websocket] closing stream...") + + // close the close signal channel, so that reader and ping worker will stop + close(s.CloseC) + + // get the connection object before call the context cancel function + s.ConnLock.Lock() + conn := s.Conn + connCancel := s.ConnCancel + s.ConnLock.Unlock() + + // cancel the context so that the ticker loop and listen key updater will be stopped. 
+ if connCancel != nil { + connCancel() + } + + // gracefully write the close message to the connection + err := conn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) + if err != nil { + return errors.Wrap(err, "websocket write close message error") + } + + log.Debugf("[websocket] stream closed") + + // let the reader close the connection + <-time.After(time.Second) + return nil +} + +type Depth string + +const ( + DepthLevelFull Depth = "FULL" + DepthLevelMedium Depth = "MEDIUM" + DepthLevel1 Depth = "1" + DepthLevel5 Depth = "5" + DepthLevel20 Depth = "20" +) + +type Speed string + +const ( + SpeedHigh Speed = "HIGH" + SpeedMedium Speed = "MEDIUM" + SpeedLow Speed = "LOW" +) + // SubscribeOptions provides the standard stream options type SubscribeOptions struct { - Interval string - Depth string + // TODO: change to Interval type later + Interval Interval `json:"interval,omitempty"` + Depth Depth `json:"depth,omitempty"` + Speed Speed `json:"speed,omitempty"` } func (o SubscribeOptions) String() string { if len(o.Interval) > 0 { - return o.Interval + return string(o.Interval) } - return o.Depth + return string(o.Depth) } type Subscription struct { - Symbol string - Channel Channel - Options SubscribeOptions + Symbol string `json:"symbol"` + Channel Channel `json:"channel"` + Options SubscribeOptions `json:"options"` } diff --git a/pkg/types/ticker.go b/pkg/types/ticker.go new file mode 100644 index 0000000000..a1649f4194 --- /dev/null +++ b/pkg/types/ticker.go @@ -0,0 +1,17 @@ +package types + +import ( + "github.com/c9s/bbgo/pkg/fixedpoint" + "time" +) + +type Ticker struct { + Time time.Time + Volume fixedpoint.Value // `volume` from Max & binance + Last fixedpoint.Value // `last` from Max, `lastPrice` from binance + Open fixedpoint.Value // `open` from Max, `openPrice` from binance + High fixedpoint.Value // `high` from Max, `highPrice` from binance + Low fixedpoint.Value // `low` from Max, `lowPrice` from binance + Buy fixedpoint.Value // `buy` from Max, `bidPrice` from binance + Sell fixedpoint.Value // `sell` from Max, `askPrice` from binance +} diff --git a/pkg/types/time.go b/pkg/types/time.go new file mode 100644 index 0000000000..2648d08af4 --- /dev/null +++ b/pkg/types/time.go @@ -0,0 +1,309 @@ +package types + +import ( + "database/sql/driver" + "encoding/json" + "fmt" + "strconv" + "strings" + "time" + + "github.com/c9s/bbgo/pkg/util" +) + +var numOfDigitsOfUnixTimestamp = len(strconv.FormatInt(time.Now().Unix(), 10)) +var numOfDigitsOfMilliSecondUnixTimestamp = len(strconv.FormatInt(time.Now().UnixNano()/int64(time.Millisecond), 10)) +var numOfDigitsOfNanoSecondsUnixTimestamp = len(strconv.FormatInt(time.Now().UnixNano(), 10)) + +type NanosecondTimestamp time.Time + +func (t NanosecondTimestamp) Time() time.Time { + return time.Time(t) +} + +func (t *NanosecondTimestamp) UnmarshalJSON(data []byte) error { + var v int64 + + var err = json.Unmarshal(data, &v) + if err != nil { + return err + } + + *t = NanosecondTimestamp(time.Unix(0, v)) + return nil +} + +type MillisecondTimestamp time.Time + +func NewMillisecondTimestampFromInt(i int64) MillisecondTimestamp { + return MillisecondTimestamp(time.Unix(0, i*int64(time.Millisecond))) +} + +func MustParseMillisecondTimestamp(a string) MillisecondTimestamp { + m, err := strconv.ParseInt(a, 10, 64) // startTime + if err != nil { + panic(fmt.Errorf("millisecond timestamp parse error %v", err)) + } + + return NewMillisecondTimestampFromInt(m) +} + +func MustParseUnixTimestamp(a 
string) time.Time { + m, err := strconv.ParseInt(a, 10, 64) // startTime + if err != nil { + panic(fmt.Errorf("millisecond timestamp parse error %v", err)) + } + + return time.Unix(m, 0) +} + +func (t MillisecondTimestamp) String() string { + return time.Time(t).String() +} + +func (t MillisecondTimestamp) Time() time.Time { + return time.Time(t) +} + +func (t *MillisecondTimestamp) UnmarshalJSON(data []byte) error { + var v interface{} + + var err = json.Unmarshal(data, &v) + if err != nil { + return err + } + + switch vt := v.(type) { + case string: + if vt == "" { + // treat empty string as 0 + *t = MillisecondTimestamp(time.Time{}) + return nil + } + + f, err := strconv.ParseFloat(vt, 64) + if err == nil { + tt, err := convertFloat64ToTime(vt, f) + if err != nil { + return err + } + + *t = MillisecondTimestamp(tt) + return nil + } + + tt, err := time.Parse(time.RFC3339Nano, vt) + if err == nil { + *t = MillisecondTimestamp(tt) + return nil + } + + return err + + case float64: + str := strconv.FormatFloat(vt, 'f', -1, 64) + tt, err := convertFloat64ToTime(str, vt) + if err != nil { + return err + } + + *t = MillisecondTimestamp(tt) + return nil + + default: + return fmt.Errorf("can not parse %T %+v as millisecond timestamp", vt, vt) + + } + + // Unreachable +} + +func convertFloat64ToTime(vt string, f float64) (time.Time, error) { + idx := strings.Index(vt, ".") + if idx > 0 { + vt = vt[0 : idx-1] + } + + if len(vt) <= numOfDigitsOfUnixTimestamp { + return time.Unix(0, int64(f*float64(time.Second))), nil + } else if len(vt) <= numOfDigitsOfMilliSecondUnixTimestamp { + return time.Unix(0, int64(f)*int64(time.Millisecond)), nil + } else if len(vt) <= numOfDigitsOfNanoSecondsUnixTimestamp { + return time.Unix(0, int64(f)), nil + } + + return time.Time{}, fmt.Errorf("the floating point value %f is out of the timestamp range", f) +} + +// Time type implements the driver value for sqlite +type Time time.Time + +var layout = "2006-01-02 15:04:05.999Z07:00" + +func (t *Time) UnmarshalJSON(data []byte) error { + // fallback to RFC3339 + return (*time.Time)(t).UnmarshalJSON(data) +} + +func (t Time) MarshalJSON() ([]byte, error) { + return time.Time(t).MarshalJSON() +} + +func (t Time) String() string { + return time.Time(t).String() +} + +func (t Time) Time() time.Time { + return time.Time(t) +} + +func (t Time) Unix() int64 { + return time.Time(t).Unix() +} + +func (t Time) UnixMilli() int64 { + return time.Time(t).UnixMilli() +} + +func (t Time) After(time2 time.Time) bool { + return time.Time(t).After(time2) +} + +func (t Time) Before(time2 time.Time) bool { + return time.Time(t).Before(time2) +} + +func NewTimeFromUnix(sec int64, nsec int64) Time { + return Time(time.Unix(sec, nsec)) +} + +// Value implements the driver.Valuer interface +// see http://jmoiron.net/blog/built-in-interfaces/ +func (t Time) Value() (driver.Value, error) { + if time.Time(t) == (time.Time{}) { + return nil, nil + } + return time.Time(t), nil +} + +func (t *Time) Scan(src interface{}) error { + // skip nil time + if src == nil { + return nil + } + + switch d := src.(type) { + + case *time.Time: + *t = Time(*d) + return nil + + case time.Time: + *t = Time(d) + return nil + + case string: + // 2020-12-16 05:17:12.994+08:00 + tt, err := time.Parse(layout, d) + if err != nil { + return err + } + + *t = Time(tt) + return nil + + case []byte: + // 2019-10-20 23:01:43.77+08:00 + tt, err := time.Parse(layout, string(d)) + if err != nil { + return err + } + + *t = Time(tt) + return nil + + default: + + } + + return 
fmt.Errorf("datatype.Time scan error, type: %T is not supported, value; %+v", src, src) +} + +var looseTimeFormats = []string{ + time.RFC3339, + time.RFC822, + "2006-01-02T15:04:05", + "2006-01-02", +} + +// LooseFormatTime parses date time string with a wide range of formats. +type LooseFormatTime time.Time + +func (t *LooseFormatTime) UnmarshalYAML(unmarshal func(interface{}) error) error { + var str string + if err := unmarshal(&str); err != nil { + return err + } + + tv, err := util.ParseTimeWithFormats(str, looseTimeFormats) + if err != nil { + return err + } + + *t = LooseFormatTime(tv) + return nil +} + +func (t *LooseFormatTime) UnmarshalJSON(data []byte) error { + var v string + err := json.Unmarshal(data, &v) + if err != nil { + return err + } + + tv, err := util.ParseTimeWithFormats(v, looseTimeFormats) + if err != nil { + return err + } + + *t = LooseFormatTime(tv) + return nil +} + +func (t LooseFormatTime) MarshalJSON() ([]byte, error) { + return []byte(strconv.Quote(time.Time(t).Format(time.RFC3339))), nil +} + +func (t LooseFormatTime) Time() time.Time { + return time.Time(t) +} + +// Timestamp is used for parsing unix timestamp (seconds) +type Timestamp time.Time + +func (t Timestamp) Format(layout string) string { + return time.Time(t).Format(layout) +} + +func (t Timestamp) Time() time.Time { + return time.Time(t) +} + +func (t Timestamp) String() string { + return time.Time(t).String() +} + +func (t Timestamp) MarshalJSON() ([]byte, error) { + ts := time.Time(t).Unix() + return json.Marshal(ts) +} + +func (t *Timestamp) UnmarshalJSON(o []byte) error { + var timestamp int64 + if err := json.Unmarshal(o, ×tamp); err != nil { + return err + } + + *t = Timestamp(time.Unix(timestamp, 0)) + return nil +} diff --git a/pkg/types/time_test.go b/pkg/types/time_test.go new file mode 100644 index 0000000000..ae21984cf3 --- /dev/null +++ b/pkg/types/time_test.go @@ -0,0 +1,73 @@ +package types + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestLooseFormatTime_UnmarshalJSON(t *testing.T) { + tests := []struct { + name string + t LooseFormatTime + args []byte + wantErr bool + }{ + { + name: "simple date", + args: []byte("\"2021-01-01\""), + t: LooseFormatTime(time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)), + }, + { + name: "utc", + args: []byte("\"2021-01-01T12:10:10\""), + t: LooseFormatTime(time.Date(2021, 1, 1, 12, 10, 10, 0, time.UTC)), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var v LooseFormatTime + if err := v.UnmarshalJSON(tt.args); (err != nil) != tt.wantErr { + t.Errorf("UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr) + } else { + assert.Equal(t, v.Time(), tt.t.Time()) + } + }) + } +} + +func TestMillisecondTimestamp_UnmarshalJSON(t *testing.T) { + tests := []struct { + name string + t MillisecondTimestamp + args []byte + wantErr bool + }{ + { + name: "millisecond in string", + args: []byte("\"1620289117764\""), + t: MillisecondTimestamp(time.Unix(0, 1620289117764*int64(time.Millisecond))), + }, + { + name: "millisecond in number", + args: []byte("1620289117764"), + t: MillisecondTimestamp(time.Unix(0, 1620289117764*int64(time.Millisecond))), + }, + { + name: "millisecond in decimal", + args: []byte("1620289117.764"), + t: MillisecondTimestamp(time.Unix(0, 1620289117764*int64(time.Millisecond))), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var v MillisecondTimestamp + if err := v.UnmarshalJSON(tt.args); (err != nil) != tt.wantErr { + t.Errorf("UnmarshalJSON() 
error = %v, wantErr %v", err, tt.wantErr) + } else { + assert.Equal(t, tt.t.Time(), v.Time()) + } + }) + } +} diff --git a/pkg/types/trade.go b/pkg/types/trade.go index 56e82e4098..0db1fd3143 100644 --- a/pkg/types/trade.go +++ b/pkg/types/trade.go @@ -1,12 +1,16 @@ package types import ( + "database/sql" "fmt" + "strconv" + "strings" "sync" "time" "github.com/slack-go/slack" + "github.com/c9s/bbgo/pkg/fixedpoint" "github.com/c9s/bbgo/pkg/util" ) @@ -23,13 +27,20 @@ type TradeSlice struct { func (s *TradeSlice) Copy() []Trade { s.mu.Lock() - slice := make([]Trade, len(s.Trades), len(s.Trades)) + slice := make([]Trade, len(s.Trades)) copy(slice, s.Trades) s.mu.Unlock() return slice } +func (s *TradeSlice) Reverse() { + slice := s.Trades + for i, j := 0, len(slice)-1; i < j; i, j = i+1, j-1 { + slice[i], slice[j] = slice[j], slice[i] + } +} + func (s *TradeSlice) Append(t Trade) { s.mu.Lock() s.Trades = append(s.Trades, t) @@ -41,35 +52,129 @@ type Trade struct { GID int64 `json:"gid" db:"gid"` // ID is the source trade ID - ID int64 `json:"id" db:"id"` - OrderID uint64 `json:"orderID" db:"order_id"` - Exchange string `json:"exchange" db:"exchange"` - Price float64 `json:"price" db:"price"` - Quantity float64 `json:"quantity" db:"quantity"` - QuoteQuantity float64 `json:"quoteQuantity" db:"quote_quantity"` - Symbol string `json:"symbol" db:"symbol"` - - Side SideType `json:"side" db:"side"` - IsBuyer bool `json:"isBuyer" db:"is_buyer"` - IsMaker bool `json:"isMaker" db:"is_maker"` - Time time.Time `json:"tradedAt" db:"traded_at"` - Fee float64 `json:"fee" db:"fee"` - FeeCurrency string `json:"feeCurrency" db:"fee_currency"` + ID uint64 `json:"id" db:"id"` + OrderID uint64 `json:"orderID" db:"order_id"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + Price fixedpoint.Value `json:"price" db:"price"` + Quantity fixedpoint.Value `json:"quantity" db:"quantity"` + QuoteQuantity fixedpoint.Value `json:"quoteQuantity" db:"quote_quantity"` + Symbol string `json:"symbol" db:"symbol"` + + Side SideType `json:"side" db:"side"` + IsBuyer bool `json:"isBuyer" db:"is_buyer"` + IsMaker bool `json:"isMaker" db:"is_maker"` + Time Time `json:"tradedAt" db:"traded_at"` + Fee fixedpoint.Value `json:"fee" db:"fee"` + FeeCurrency string `json:"feeCurrency" db:"fee_currency"` IsMargin bool `json:"isMargin" db:"is_margin"` + IsFutures bool `json:"isFutures" db:"is_futures"` IsIsolated bool `json:"isIsolated" db:"is_isolated"` + + // The following fields are null-able fields + + // StrategyID is the strategy that execute this trade + StrategyID sql.NullString `json:"strategyID" db:"strategy"` + + // PnL is the profit and loss value of the executed trade + PnL sql.NullFloat64 `json:"pnl" db:"pnl"` +} + +func (trade Trade) CsvHeader() []string { + return []string{"id", "order_id", "exchange", "symbol", "price", "quantity", "quote_quantity", "side", "is_buyer", "is_maker", "fee", "fee_currency", "time"} +} + +func (trade Trade) CsvRecords() [][]string { + return [][]string{ + { + strconv.FormatUint(trade.ID, 10), + strconv.FormatUint(trade.OrderID, 10), + trade.Exchange.String(), + trade.Symbol, + trade.Price.String(), + trade.Quantity.String(), + trade.QuoteQuantity.String(), + trade.Side.String(), + strconv.FormatBool(trade.IsBuyer), + strconv.FormatBool(trade.IsMaker), + trade.Fee.String(), + trade.FeeCurrency, + trade.Time.Time().Format(time.RFC1123), + }, + } +} + +func (trade Trade) PositionChange() fixedpoint.Value { + q := trade.Quantity + switch trade.Side { + case SideTypeSell: + return q.Neg() + + 
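
Since `Trade` now carries fixedpoint values and exposes `CsvHeader` / `CsvRecords`, a small sketch of how a report writer might dump trades to CSV; the `writeTradesCSV` helper is illustrative, not part of this change.

```go
package example

import (
	"encoding/csv"
	"io"

	"github.com/c9s/bbgo/pkg/types"
)

// writeTradesCSV writes a header row followed by one record per trade,
// using the CsvHeader / CsvRecords helpers on types.Trade.
func writeTradesCSV(w io.Writer, trades []types.Trade) error {
	cw := csv.NewWriter(w)

	if len(trades) > 0 {
		if err := cw.Write(trades[0].CsvHeader()); err != nil {
			return err
		}
	}

	for _, trade := range trades {
		for _, record := range trade.CsvRecords() {
			if err := cw.Write(record); err != nil {
				return err
			}
		}
	}

	cw.Flush()
	return cw.Error()
}
```
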
case SideTypeBuy: + return q + + case SideTypeSelf: + return fixedpoint.Zero + } + return fixedpoint.Zero +} + +/*func trimTrailingZero(a string) string { + index := strings.Index(a, ".") + if index == -1 { + return a + } + + var c byte + var i int + for i = len(a) - 1; i >= 0; i-- { + c = a[i] + if c == '0' { + continue + } else if c == '.' { + return a[0:i] + } else { + return a[0 : i+1] + } + } + return a +} + +func trimTrailingZero(a float64) string { + return trimTrailingZero(fmt.Sprintf("%f", a)) +}*/ + +// String is for console output +func (trade Trade) String() string { + return fmt.Sprintf("TRADE %s %s %4s %-4s @ %-6s | AMOUNT %s | FEE %s %s | OrderID %d | %s", + trade.Exchange.String(), + trade.Symbol, + trade.Side, + trade.Quantity.String(), + trade.Price.String(), + trade.QuoteQuantity.String(), + trade.Fee.String(), + trade.FeeCurrency, + trade.OrderID, + trade.Time.Time().Format(time.StampMilli), + ) } +// PlainText is used for telegram-styled messages func (trade Trade) PlainText() string { - return fmt.Sprintf("%s Trade %s %s price %s, quantity %s, amount %s", - trade.Exchange, + return fmt.Sprintf("Trade %s %s %s %s @ %s, amount %s, fee %s %s", + trade.Exchange.String(), trade.Symbol, trade.Side, - util.FormatFloat(trade.Price, 2), - util.FormatFloat(trade.Quantity, 4), - util.FormatFloat(trade.QuoteQuantity, 2)) + trade.Quantity.String(), + trade.Price.String(), + trade.QuoteQuantity.String(), + trade.Fee.String(), + trade.FeeCurrency) } +var slackTradeTextTemplate = ":handshake: Trade {{ .Symbol }} {{ .Side }} {{ .Quantity }} @ {{ .Price }}" + func (trade Trade) SlackAttachment() slack.Attachment { var color = "#DC143C" @@ -77,20 +182,54 @@ func (trade Trade) SlackAttachment() slack.Attachment { color = "#228B22" } + liquidity := trade.Liquidity() + text := util.Render(slackTradeTextTemplate, trade) + footerIcon := ExchangeFooterIcon(trade.Exchange) + return slack.Attachment{ - Text: fmt.Sprintf("*%s* Trade %s", trade.Symbol, trade.Side), + Text: text, + // Title: ... + // Pretext: pretext, Color: color, - // Pretext: "", - // Text: "", Fields: []slack.AttachmentField{ - {Title: "Exchange", Value: trade.Exchange, Short: true}, - {Title: "Price", Value: util.FormatFloat(trade.Price, 2), Short: true}, - {Title: "Volume", Value: util.FormatFloat(trade.Quantity, 4), Short: true}, - {Title: "Amount", Value: util.FormatFloat(trade.QuoteQuantity, 2)}, - {Title: "Fee", Value: util.FormatFloat(trade.Fee, 4), Short: true}, + {Title: "Exchange", Value: trade.Exchange.String(), Short: true}, + {Title: "Price", Value: trade.Price.String(), Short: true}, + {Title: "Quantity", Value: trade.Quantity.String(), Short: true}, + {Title: "QuoteQuantity", Value: trade.QuoteQuantity.String(), Short: true}, + {Title: "Fee", Value: trade.Fee.String(), Short: true}, {Title: "FeeCurrency", Value: trade.FeeCurrency, Short: true}, + {Title: "Liquidity", Value: liquidity, Short: true}, + {Title: "Order ID", Value: strconv.FormatUint(trade.OrderID, 10), Short: true}, }, - // Footer: tradingCtx.TradeStartTime.Format(time.RFC822), - // FooterIcon: "", + FooterIcon: footerIcon, + Footer: strings.ToLower(trade.Exchange.String()) + util.Render(" creation time {{ . 
}}", trade.Time.Time().Format(time.StampMilli)), + } +} + +func (trade Trade) Liquidity() (o string) { + if trade.IsMaker { + o = "MAKER" + } else { + o = "TAKER" + } + + return o +} + +func (trade Trade) Key() TradeKey { + return TradeKey{ + Exchange: trade.Exchange, + ID: trade.ID, + Side: trade.Side, } } + +type TradeKey struct { + Exchange ExchangeName + ID uint64 + Side SideType +} + +func (k TradeKey) String() string { + return k.Exchange.String() + strconv.FormatUint(k.ID, 10) + k.Side.String() +} diff --git a/pkg/types/trade_stats.go b/pkg/types/trade_stats.go new file mode 100644 index 0000000000..9c0095ed6a --- /dev/null +++ b/pkg/types/trade_stats.go @@ -0,0 +1,44 @@ +package types + +import ( + "gopkg.in/yaml.v3" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +type TradeStats struct { + WinningRatio fixedpoint.Value `json:"winningRatio" yaml:"winningRatio"` + NumOfLossTrade int `json:"numOfLossTrade" yaml:"numOfLossTrade"` + NumOfProfitTrade int `json:"numOfProfitTrade" yaml:"numOfProfitTrade"` + GrossProfit fixedpoint.Value `json:"grossProfit" yaml:"grossProfit"` + GrossLoss fixedpoint.Value `json:"grossLoss" yaml:"grossLoss"` + Profits []fixedpoint.Value `json:"profits" yaml:"profits"` + Losses []fixedpoint.Value `json:"losses" yaml:"losses"` + MostProfitableTrade fixedpoint.Value `json:"mostProfitableTrade" yaml:"mostProfitableTrade"` + MostLossTrade fixedpoint.Value `json:"mostLossTrade" yaml:"mostLossTrade"` +} + +func (s *TradeStats) Add(pnl fixedpoint.Value) { + if pnl.Sign() > 0 { + s.NumOfProfitTrade++ + s.Profits = append(s.Profits, pnl) + s.GrossProfit = s.GrossProfit.Add(pnl) + s.MostProfitableTrade = fixedpoint.Max(s.MostProfitableTrade, pnl) + } else { + s.NumOfLossTrade++ + s.Losses = append(s.Losses, pnl) + s.GrossLoss = s.GrossLoss.Add(pnl) + s.MostLossTrade = fixedpoint.Min(s.MostLossTrade, pnl) + } + + if s.NumOfLossTrade == 0 && s.NumOfProfitTrade > 0 { + s.WinningRatio = fixedpoint.One + } else { + s.WinningRatio = fixedpoint.NewFromFloat(float64(s.NumOfProfitTrade) / float64(s.NumOfLossTrade)) + } +} + +func (s *TradeStats) String() string { + out, _ := yaml.Marshal(s) + return string(out) +} diff --git a/pkg/types/trade_test.go b/pkg/types/trade_test.go new file mode 100644 index 0000000000..d87a2c6702 --- /dev/null +++ b/pkg/types/trade_test.go @@ -0,0 +1,51 @@ +package types + +import "testing" +import "github.com/c9s/bbgo/pkg/fixedpoint" + +func Test_trimTrailingZero(t *testing.T) { + type args struct { + a string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "trailing floating zero", + args: args{ + a: "1.23400000", + }, + want: "1.234", + }, + { + name: "trailing zero of an integer", + args: args{ + a: "1.00000", + }, + want: "1", + }, + { + name: "non trailing zero", + args: args{ + a: "1.00012345", + }, + want: "1.00012345", + }, + { + name: "integer", + args: args{ + a: "1200000", + }, + want: "1200000", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := fixedpoint.MustNewFromString(tt.args.a).String(); got != tt.want { + t.Errorf("trimTrailingZero() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/types/value_map.go b/pkg/types/value_map.go new file mode 100644 index 0000000000..9d67a68e11 --- /dev/null +++ b/pkg/types/value_map.go @@ -0,0 +1,157 @@ +package types + +import "github.com/c9s/bbgo/pkg/fixedpoint" + +type ValueMap map[string]fixedpoint.Value + +func (m ValueMap) Eq(n ValueMap) bool { + if len(m) != len(n) { + return false + } + + for m_k, m_v := 
range m { + n_v, ok := n[m_k] + if !ok { + return false + } + + if !m_v.Eq(n_v) { + return false + } + } + + return true +} + +func (m ValueMap) Add(n ValueMap) ValueMap { + if len(m) != len(n) { + panic("unequal length") + } + + o := ValueMap{} + + for m_k, m_v := range m { + n_v, ok := n[m_k] + if !ok { + panic("key not found") + } + + o[m_k] = m_v.Add(n_v) + } + + return o +} + +func (m ValueMap) Sub(n ValueMap) ValueMap { + if len(m) != len(n) { + panic("unequal length") + } + + o := ValueMap{} + + for m_k, m_v := range m { + n_v, ok := n[m_k] + if !ok { + panic("key not found") + } + + o[m_k] = m_v.Sub(n_v) + } + + return o +} + +func (m ValueMap) Mul(n ValueMap) ValueMap { + if len(m) != len(n) { + panic("unequal length") + } + + o := ValueMap{} + + for m_k, m_v := range m { + n_v, ok := n[m_k] + if !ok { + panic("key not found") + } + + o[m_k] = m_v.Mul(n_v) + } + + return o +} + +func (m ValueMap) Div(n ValueMap) ValueMap { + if len(m) != len(n) { + panic("unequal length") + } + + o := ValueMap{} + + for m_k, m_v := range m { + n_v, ok := n[m_k] + if !ok { + panic("key not found") + } + + o[m_k] = m_v.Div(n_v) + } + + return o +} + +func (m ValueMap) AddScalar(x fixedpoint.Value) ValueMap { + o := ValueMap{} + + for k, v := range m { + o[k] = v.Add(x) + } + + return o +} + +func (m ValueMap) SubScalar(x fixedpoint.Value) ValueMap { + o := ValueMap{} + + for k, v := range m { + o[k] = v.Sub(x) + } + + return o +} + +func (m ValueMap) MulScalar(x fixedpoint.Value) ValueMap { + o := ValueMap{} + + for k, v := range m { + o[k] = v.Mul(x) + } + + return o +} + +func (m ValueMap) DivScalar(x fixedpoint.Value) ValueMap { + o := ValueMap{} + + for k, v := range m { + o[k] = v.Div(x) + } + + return o +} + +func (m ValueMap) Sum() fixedpoint.Value { + var sum fixedpoint.Value + for _, v := range m { + sum = sum.Add(v) + } + return sum +} + +func (m ValueMap) Normalize() ValueMap { + sum := m.Sum() + if sum.Eq(fixedpoint.Zero) { + panic("zero sum") + } + + return m.DivScalar(sum) +} diff --git a/pkg/types/value_map_test.go b/pkg/types/value_map_test.go new file mode 100644 index 0000000000..c6eae497a6 --- /dev/null +++ b/pkg/types/value_map_test.go @@ -0,0 +1,125 @@ +package types + +import ( + "testing" + + "github.com/c9s/bbgo/pkg/fixedpoint" + "github.com/stretchr/testify/assert" +) + +func Test_ValueMap_Eq(t *testing.T) { + m1 := ValueMap{ + "A": fixedpoint.NewFromFloat(3.0), + "B": fixedpoint.NewFromFloat(4.0), + } + + m2 := ValueMap{} + + m3 := ValueMap{"A": fixedpoint.NewFromFloat(5.0)} + + m4 := ValueMap{ + "A": fixedpoint.NewFromFloat(6.0), + "B": fixedpoint.NewFromFloat(7.0), + } + + m5 := ValueMap{ + "A": fixedpoint.NewFromFloat(3.0), + "B": fixedpoint.NewFromFloat(4.0), + } + + assert.True(t, m1.Eq(m1)) + assert.False(t, m1.Eq(m2)) + assert.False(t, m1.Eq(m3)) + assert.False(t, m1.Eq(m4)) + assert.True(t, m1.Eq(m5)) +} + +func Test_ValueMap_Add(t *testing.T) { + m1 := ValueMap{ + "A": fixedpoint.NewFromFloat(3.0), + "B": fixedpoint.NewFromFloat(4.0), + } + + m2 := ValueMap{ + "A": fixedpoint.NewFromFloat(5.0), + "B": fixedpoint.NewFromFloat(6.0), + } + + m3 := ValueMap{ + "A": fixedpoint.NewFromFloat(8.0), + "B": fixedpoint.NewFromFloat(10.0), + } + + m4 := ValueMap{"A": fixedpoint.NewFromFloat(8.0)} + + assert.Equal(t, m3, m1.Add(m2)) + assert.Panics(t, func() { m1.Add(m4) }) +} + +func Test_ValueMap_AddScalar(t *testing.T) { + x := fixedpoint.NewFromFloat(5.0) + + m1 := ValueMap{ + "A": fixedpoint.NewFromFloat(3.0), + "B": fixedpoint.NewFromFloat(4.0), + } + + m2 := ValueMap{ + 
"A": fixedpoint.NewFromFloat(3.0).Add(x), + "B": fixedpoint.NewFromFloat(4.0).Add(x), + } + + assert.Equal(t, m2, m1.AddScalar(x)) +} + +func Test_ValueMap_DivScalar(t *testing.T) { + x := fixedpoint.NewFromFloat(5.0) + + m1 := ValueMap{ + "A": fixedpoint.NewFromFloat(3.0), + "B": fixedpoint.NewFromFloat(4.0), + } + + m2 := ValueMap{ + "A": fixedpoint.NewFromFloat(3.0).Div(x), + "B": fixedpoint.NewFromFloat(4.0).Div(x), + } + + assert.Equal(t, m2, m1.DivScalar(x)) +} + +func Test_ValueMap_Sum(t *testing.T) { + m := ValueMap{ + "A": fixedpoint.NewFromFloat(3.0), + "B": fixedpoint.NewFromFloat(4.0), + } + + assert.Equal(t, fixedpoint.NewFromFloat(7.0), m.Sum()) +} + +func Test_ValueMap_Normalize(t *testing.T) { + a := fixedpoint.NewFromFloat(3.0) + b := fixedpoint.NewFromFloat(4.0) + c := a.Add(b) + + m := ValueMap{ + "A": a, + "B": b, + } + + n := ValueMap{ + "A": a.Div(c), + "B": b.Div(c), + } + + assert.True(t, m.Normalize().Eq(n)) +} + +func Test_ValueMap_Normalize_zero_sum(t *testing.T) { + m := ValueMap{ + "A": fixedpoint.Zero, + "B": fixedpoint.Zero, + } + + assert.Panics(t, func() { m.Normalize() }) +} diff --git a/pkg/types/withdraw.go b/pkg/types/withdraw.go index 803d585a62..18781341cd 100644 --- a/pkg/types/withdraw.go +++ b/pkg/types/withdraw.go @@ -1,22 +1,67 @@ package types -import "time" +import ( + "fmt" + "time" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) type Withdraw struct { - ID string `json:"id"` - Asset string `json:"asset"` - Amount float64 `json:"amount"` - Address string `json:"address"` - AddressTag string `json:"addressTag"` - Status string `json:"status"` - - TransactionID string `json:"txId"` - TransactionFee float64 `json:"transactionFee"` - WithdrawOrderID string `json:"withdrawOrderId"` - ApplyTime time.Time `json:"applyTime"` - Network string `json:"network"` + GID int64 `json:"gid" db:"gid"` + Exchange ExchangeName `json:"exchange" db:"exchange"` + Asset string `json:"asset" db:"asset"` + Amount fixedpoint.Value `json:"amount" db:"amount"` + Address string `json:"address" db:"address"` + AddressTag string `json:"addressTag"` + Status string `json:"status"` + + TransactionID string `json:"transactionID" db:"txn_id"` + TransactionFee fixedpoint.Value `json:"transactionFee" db:"txn_fee"` + TransactionFeeCurrency string `json:"transactionFeeCurrency" db:"txn_fee_currency"` + WithdrawOrderID string `json:"withdrawOrderId"` + ApplyTime Time `json:"applyTime" db:"time"` + Network string `json:"network" db:"network"` +} + +func cutstr(s string, maxLen, head, tail int) string { + if len(s) > maxLen { + l := len(s) + return s[0:head] + "..." 
+ s[l-tail:] + } + return s +} + +func (w Withdraw) String() (o string) { + o = fmt.Sprintf("%s WITHDRAW %8f %s -> ", w.Exchange, w.Amount.Float64(), w.Asset) + + if len(w.Network) > 0 && w.Network != w.Asset { + o += w.Network + ":" + } + + o += fmt.Sprintf("%s @ %s", w.Address, w.ApplyTime.Time()) + + if !w.TransactionFee.IsZero() { + feeCurrency := w.TransactionFeeCurrency + if feeCurrency == "" { + feeCurrency = w.Asset + } + + o += fmt.Sprintf(" FEE %4f %5s", w.TransactionFee.Float64(), feeCurrency) + } + + if len(w.TransactionID) > 0 { + o += fmt.Sprintf(" TxID: %s", cutstr(w.TransactionID, 12, 4, 4)) + } + + return o } func (w Withdraw) EffectiveTime() time.Time { - return w.ApplyTime + return w.ApplyTime.Time() +} + +type WithdrawalOptions struct { + Network string + AddressTag string } diff --git a/pkg/util/dir.go b/pkg/util/dir.go new file mode 100644 index 0000000000..5e1914c0e2 --- /dev/null +++ b/pkg/util/dir.go @@ -0,0 +1,23 @@ +package util + +import ( + "fmt" + "os" +) + +func SafeMkdirAll(p string) error { + st, err := os.Stat(p) + if err == nil { + if !st.IsDir() { + return fmt.Errorf("path %s is not a directory", p) + } + + return nil + } + + if os.IsNotExist(err) { + return os.MkdirAll(p, 0755) + } + + return nil +} diff --git a/pkg/util/envvars.go b/pkg/util/envvars.go new file mode 100644 index 0000000000..9ad3a714e8 --- /dev/null +++ b/pkg/util/envvars.go @@ -0,0 +1,63 @@ +package util + +import ( + "os" + "strconv" + "time" + + "github.com/sirupsen/logrus" +) + +func GetEnvVarDuration(n string) (time.Duration, bool) { + str, ok := os.LookupEnv(n) + if !ok { + return 0, false + } + + du, err := time.ParseDuration(str) + if err != nil { + logrus.WithError(err).Errorf("can not parse env var %q as time.Duration, incorrect format", str) + return 0, false + } + + return du, true +} + +func GetEnvVarInt(n string) (int, bool) { + str, ok := os.LookupEnv(n) + if !ok { + return 0, false + } + + num, err := strconv.Atoi(str) + if err != nil { + logrus.WithError(err).Errorf("can not parse env var %q as int, incorrect format", str) + return 0, false + } + + return num, true +} + +func SetEnvVarBool(n string, v *bool) bool { + b, ok := GetEnvVarBool(n) + if ok { + *v = b + } + + return ok +} + +func GetEnvVarBool(n string) (bool, bool) { + str, ok := os.LookupEnv(n) + if !ok { + return false, false + } + + num, err := strconv.ParseBool(str) + if err != nil { + logrus.WithError(err).Errorf("can not parse env var %q as bool, incorrect format", str) + return false, false + } + + return num, true +} diff --git a/pkg/util/exchange_icon.go b/pkg/util/exchange_icon.go new file mode 100644 index 0000000000..c7d868219f --- /dev/null +++ b/pkg/util/exchange_icon.go @@ -0,0 +1 @@ +package util diff --git a/pkg/util/fnv.go b/pkg/util/fnv.go new file mode 100644 index 0000000000..9c84294001 --- /dev/null +++ b/pkg/util/fnv.go @@ -0,0 +1,9 @@ +package util + +import "hash/fnv" + +func FNV32(s string) uint32 { + h := fnv.New32a() + h.Write([]byte(s)) + return h.Sum32() +} diff --git a/pkg/util/http_response.go b/pkg/util/http_response.go new file mode 100644 index 0000000000..392f14b6f6 --- /dev/null +++ b/pkg/util/http_response.go @@ -0,0 +1,58 @@ +package util + +import ( + "encoding/json" + "io/ioutil" + "net/http" +) + +// Response is wrapper for standard http.Response and provides +// more methods. +type Response struct { + *http.Response + + // Body overrides the composited Body field. 
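
As a usage note for the `Response` wrapper added in pkg/util/http_response.go, here is a minimal sketch of wrapping a plain `*http.Response` so the body is read and closed once, then decoded on demand with the methods defined in this file; the endpoint URL and `fetchJSON` helper are hypothetical placeholders.

```go
package example

import (
	"context"
	"fmt"
	"net/http"

	"github.com/c9s/bbgo/pkg/util"
)

// fetchJSON issues a GET request, wraps the raw response with
// util.NewResponse (which buffers and closes the body), and decodes
// the buffered body into out.
func fetchJSON(ctx context.Context, client *http.Client, out interface{}) error {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://api.example.com/v1/ticker", nil)
	if err != nil {
		return err
	}

	res, err := client.Do(req)
	if err != nil {
		return err
	}

	resp, err := util.NewResponse(res)
	if err != nil {
		return err
	}

	if resp.IsError() {
		return fmt.Errorf("request failed: status %d, body %s", resp.StatusCode, resp.String())
	}

	return resp.DecodeJSON(out)
}
```
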
+ Body []byte +} + +// NewResponse is a wrapper of the http.Response instance, it reads the response body and close the file. +func NewResponse(r *http.Response) (response *Response, err error) { + body, err := ioutil.ReadAll(r.Body) + if err != nil { + return nil, err + } + + err = r.Body.Close() + response = &Response{Response: r, Body: body} + return response, err +} + +// String converts response body to string. +// An empty string will be returned if error. +func (r *Response) String() string { + return string(r.Body) +} + +func (r *Response) DecodeJSON(o interface{}) error { + return json.Unmarshal(r.Body, o) +} + +func (r *Response) IsError() bool { + return r.StatusCode >= 400 +} + +func (r *Response) IsJSON() bool { + switch r.Header.Get("content-type") { + case "text/json", "application/json", "application/json; charset=utf-8": + return true + } + return false +} + +func (r *Response) IsHTML() bool { + switch r.Header.Get("content-type") { + case "text/html": + return true + } + return false +} diff --git a/pkg/util/http_response_test.go b/pkg/util/http_response_test.go new file mode 100644 index 0000000000..af864f8257 --- /dev/null +++ b/pkg/util/http_response_test.go @@ -0,0 +1,74 @@ +package util + +import ( + "bytes" + "io/ioutil" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestResponse_DecodeJSON(t *testing.T) { + type temp struct { + Name string `json:"name"` + } + json := `{"name":"Test Name","a":"a"}` + reader := ioutil.NopCloser(bytes.NewReader([]byte(json))) + resp, err := NewResponse(&http.Response{ + StatusCode: 200, + Body: reader, + }) + assert.NoError(t, err) + assert.Equal(t, json, resp.String()) + + var result temp + assert.NoError(t, resp.DecodeJSON(&result)) + assert.Equal(t, "Test Name", result.Name) +} + +func TestResponse_IsError(t *testing.T) { + resp := &Response{Response: &http.Response{}} + cases := map[int]bool{ + 100: false, + 200: false, + 300: false, + 400: true, + 500: true, + } + + for code, isErr := range cases { + resp.StatusCode = code + assert.Equal(t, isErr, resp.IsError()) + } +} + +func TestResponse_IsJSON(t *testing.T) { + cases := map[string]bool{ + "text/json": true, + "application/json": true, + "application/json; charset=utf-8": true, + "text/html": false, + } + for k, v := range cases { + resp := &Response{Response: &http.Response{}} + resp.Header = http.Header{} + resp.Header.Set("content-type", k) + assert.Equal(t, v, resp.IsJSON()) + } +} + +func TestResponse_IsHTML(t *testing.T) { + cases := map[string]bool{ + "text/json": false, + "application/json": false, + "application/json; charset=utf-8": false, + "text/html": true, + } + for k, v := range cases { + resp := &Response{Response: &http.Response{}} + resp.Header = http.Header{} + resp.Header.Set("content-type", k) + assert.Equal(t, v, resp.IsHTML()) + } +} diff --git a/pkg/util/json.go b/pkg/util/json.go new file mode 100644 index 0000000000..62ea74311b --- /dev/null +++ b/pkg/util/json.go @@ -0,0 +1,15 @@ +package util + +import ( + "encoding/json" + "io/ioutil" +) + +func WriteJsonFile(p string, obj interface{}) error { + out, err := json.MarshalIndent(obj, "", " ") + if err != nil { + return err + } + + return ioutil.WriteFile(p, out, 0644) +} diff --git a/pkg/util/math.go b/pkg/util/math.go index f9b984027e..31e49b5411 100644 --- a/pkg/util/math.go +++ b/pkg/util/math.go @@ -1,6 +1,7 @@ package util import ( + "github.com/c9s/bbgo/pkg/fixedpoint" "math" "strconv" ) @@ -18,6 +19,10 @@ func Pow10(n int64) int64 { return Pow10Table[n] } +func 
FormatValue(val fixedpoint.Value, prec int) string { + return val.FormatString(prec) +} + func FormatFloat(val float64, prec int) string { return strconv.FormatFloat(val, 'f', prec, 64) } diff --git a/pkg/util/paper_trade.go b/pkg/util/paper_trade.go new file mode 100644 index 0000000000..b3a09d68b5 --- /dev/null +++ b/pkg/util/paper_trade.go @@ -0,0 +1,6 @@ +package util + +func IsPaperTrade() bool { + v, ok := GetEnvVarBool("PAPER_TRADE") + return ok && v +} diff --git a/pkg/util/profile.go b/pkg/util/profile.go new file mode 100644 index 0000000000..1d3753aa23 --- /dev/null +++ b/pkg/util/profile.go @@ -0,0 +1,42 @@ +package util + +import ( + "time" +) + +type TimeProfile struct { + Name string + StartTime, EndTime time.Time + Duration time.Duration +} + +func StartTimeProfile(args ...string) TimeProfile { + name := "" + if len(args) > 0 { + name = args[0] + } + return TimeProfile{StartTime: time.Now(), Name: name} +} + +func (p *TimeProfile) TilNow() time.Duration { + return time.Since(p.StartTime) +} + +func (p *TimeProfile) Stop() time.Duration { + p.EndTime = time.Now() + p.Duration = p.EndTime.Sub(p.StartTime) + return p.Duration +} + +type logFunction func(format string, args ...interface{}) + +func (p *TimeProfile) StopAndLog(f logFunction) { + duration := p.Stop() + s := "[profile] " + if len(p.Name) > 0 { + s += p.Name + } + + s += " " + duration.String() + f(s) +} diff --git a/pkg/util/render.go b/pkg/util/render.go index 0c317ed99e..69ccb78b26 100644 --- a/pkg/util/render.go +++ b/pkg/util/render.go @@ -2,21 +2,24 @@ package util import ( "bytes" - "github.com/sirupsen/logrus" "text/template" + + "github.com/sirupsen/logrus" ) func Render(tpl string, args interface{}) string { var buf = bytes.NewBuffer(nil) tmpl, err := template.New("tmp").Parse(tpl) if err != nil { - logrus.WithError(err).Error("template error") + logrus.WithError(err).Error("template parse error") return "" } + err = tmpl.Execute(buf, args) if err != nil { - logrus.WithError(err).Error("template error") + logrus.WithError(err).Error("template execute error") return "" } + return buf.String() } diff --git a/pkg/util/reonce.go b/pkg/util/reonce.go new file mode 100644 index 0000000000..f1fae5ddaf --- /dev/null +++ b/pkg/util/reonce.go @@ -0,0 +1,33 @@ +package util + +import ( + "sync" + "sync/atomic" +) + +type Reonce struct { + done uint32 + m sync.Mutex +} + +func (o *Reonce) Reset() { + o.m.Lock() + atomic.StoreUint32(&o.done, 0) + o.m.Unlock() +} + +func (o *Reonce) Do(f func()) { + if atomic.LoadUint32(&o.done) == 0 { + // Outlined slow-path to allow inlining of the fast-path. 
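
`Reonce` above behaves like `sync.Once` but can be re-armed with `Reset`. A short sketch of the intended pattern follows; the `onConnect` / `onDisconnect` hooks are hypothetical, used here only to show when `Do` and `Reset` would be called.

```go
package example

import (
	"log"

	"github.com/c9s/bbgo/pkg/util"
)

// initOnce guards a per-connection initialization block.
var initOnce util.Reonce

// onConnect runs the initialization at most once per connection generation.
func onConnect() {
	initOnce.Do(func() {
		log.Println("initializing session state")
	})
}

// onDisconnect re-arms the guard so the next onConnect runs the block again.
func onDisconnect() {
	initOnce.Reset()
}
```
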
+ o.doSlow(f) + } +} + +func (o *Reonce) doSlow(f func()) { + o.m.Lock() + defer o.m.Unlock() + if o.done == 0 { + defer atomic.StoreUint32(&o.done, 1) + f() + } +} diff --git a/pkg/util/reonce_test.go b/pkg/util/reonce_test.go new file mode 100644 index 0000000000..bd35284811 --- /dev/null +++ b/pkg/util/reonce_test.go @@ -0,0 +1,39 @@ +package util + +import ( + "sync" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestReonce_DoAndReset(t *testing.T) { + var cnt = 0 + var reonce Reonce + var wgAll, wg sync.WaitGroup + wg.Add(1) + wgAll.Add(2) + go reonce.Do(func() { + t.Log("once #1") + time.Sleep(10 * time.Millisecond) + cnt++ + wg.Done() + wgAll.Done() + }) + + // make sure it's locked + wg.Wait() + t.Logf("reset") + reonce.Reset() + + go reonce.Do(func() { + t.Log("once #2") + time.Sleep(10 * time.Millisecond) + cnt++ + wgAll.Done() + }) + + wgAll.Wait() + assert.Equal(t, 2, cnt) +} diff --git a/pkg/util/simple_args.go b/pkg/util/simple_args.go new file mode 100644 index 0000000000..47c0234b2b --- /dev/null +++ b/pkg/util/simple_args.go @@ -0,0 +1,30 @@ +package util + +import ( + "reflect" + + "github.com/c9s/bbgo/pkg/fixedpoint" +) + +// FilterSimpleArgs filters out the simple type arguments +// int, string, bool, and []byte +func FilterSimpleArgs(args []interface{}) (simpleArgs []interface{}) { + for _, arg := range args { + switch arg.(type) { + case int, int64, int32, uint64, uint32, string, []byte, float64, float32, fixedpoint.Value: + simpleArgs = append(simpleArgs, arg) + default: + rt := reflect.TypeOf(arg) + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + + switch rt.Kind() { + case reflect.Float64, reflect.Float32, reflect.String, reflect.Int, reflect.Int32, reflect.Uint32, reflect.Int64, reflect.Uint64, reflect.Bool: + simpleArgs = append(simpleArgs, arg) + } + } + } + + return simpleArgs +} diff --git a/pkg/util/string.go b/pkg/util/string.go new file mode 100644 index 0000000000..268233acbc --- /dev/null +++ b/pkg/util/string.go @@ -0,0 +1,29 @@ +package util + +import "strings" + +func StringSliceContains(slice []string, needle string) bool { + for _, s := range slice { + if s == needle { + return true + } + } + + return false +} + +func MaskKey(key string) string { + if len(key) == 0 { + return "{empty}" + } + + h := len(key) / 3 + if h > 5 { + h = 5 + } + + maskKey := key[0:h] + maskKey += strings.Repeat("*", len(key)-h*2) + maskKey += key[len(key)-h:] + return maskKey +} diff --git a/pkg/util/string_test.go b/pkg/util/string_test.go new file mode 100644 index 0000000000..7d55f425a7 --- /dev/null +++ b/pkg/util/string_test.go @@ -0,0 +1,42 @@ +package util + +import "testing" + +func TestMaskKey(t *testing.T) { + type args struct { + key string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "key length more than 5", + args: args{key: "abcdefghijklmnopqr"}, + want: "abcde********nopqr", + }, + { + name: "key length less than 10", + args: args{key: "12345678"}, + want: "12****78", + }, + { + name: "even", + args: args{key: "1234567"}, + want: "12***67", + }, + { + name: "empty", + args: args{key: ""}, + want: "{empty}", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := MaskKey(tt.args.key); got != tt.want { + t.Errorf("MaskKey() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/util/time.go b/pkg/util/time.go new file mode 100644 index 0000000000..d95d1a9a14 --- /dev/null +++ b/pkg/util/time.go @@ -0,0 +1,35 @@ +package util + +import ( + "fmt" + 
"math/rand" + "time" +) + +func MillisecondsJitter(d time.Duration, jitterInMilliseconds int) time.Duration { + n := rand.Intn(jitterInMilliseconds) + return d + time.Duration(n)*time.Millisecond +} + +func BeginningOfTheDay(t time.Time) time.Time { + year, month, day := t.Date() + return time.Date(year, month, day, 0, 0, 0, 0, t.Location()) +} + +func Over24Hours(since time.Time) bool { + return time.Since(since) >= 24*time.Hour +} + +func UnixMilli() int64 { + return time.Now().UnixNano() / int64(time.Millisecond) +} + +func ParseTimeWithFormats(strTime string, formats []string) (time.Time, error) { + for _, format := range formats { + tt, err := time.Parse(format, strTime) + if err == nil { + return tt, nil + } + } + return time.Time{}, fmt.Errorf("failed to parse time %s, valid formats are %+v", strTime, formats) +} diff --git a/pkg/version/dev.go b/pkg/version/dev.go new file mode 100644 index 0000000000..071b787c90 --- /dev/null +++ b/pkg/version/dev.go @@ -0,0 +1,8 @@ +//go:build !release +// +build !release + +package version + +const Version = "v1.35.0-daaa3352-dev" + +const VersionGitRef = "daaa3352" diff --git a/pkg/version/version.go b/pkg/version/version.go new file mode 100644 index 0000000000..8a443c9875 --- /dev/null +++ b/pkg/version/version.go @@ -0,0 +1,8 @@ +//go:build release +// +build release + +package version + +const Version = "v1.35.0-daaa3352" + +const VersionGitRef = "daaa3352" diff --git a/python/.gitignore b/python/.gitignore new file mode 100644 index 0000000000..b6e47617de --- /dev/null +++ b/python/.gitignore @@ -0,0 +1,129 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/python/README.md b/python/README.md new file mode 100644 index 0000000000..e0b3a9f63f --- /dev/null +++ b/python/README.md @@ -0,0 +1,36 @@ +# pybbgo + +## Installation + +```sh +cd +pip install . +``` + +## Usage + +### Stream + +```python +from loguru import logger + +from bbgo import Stream +from bbgo.data import Event +from bbgo.handlers import UpdateHandler + + +class LogBook(UpdateHandler): + + def handle(self, event: Event) -> None: + logger.info(event) + + +host = '127.0.0.1' +port = 50051 + +stream = Stream(host, port) +stream.subscribe('max', 'book', 'BTCUSDT', 'full') +stream.subscribe('max', 'book', 'ETHUSDT', 'full') +stream.add_event_handler(LogBook()) +stream.start() +``` diff --git a/python/bbgo/__init__.py b/python/bbgo/__init__.py new file mode 100644 index 0000000000..bbb02050ce --- /dev/null +++ b/python/bbgo/__init__.py @@ -0,0 +1,7 @@ +from . import enums +from . import handlers +from . import utils +from .services import MarketService +from .services import TradingService +from .services import UserDataService +from .stream import Stream diff --git a/python/bbgo/data/__init__.py b/python/bbgo/data/__init__.py new file mode 100644 index 0000000000..e579559d8d --- /dev/null +++ b/python/bbgo/data/__init__.py @@ -0,0 +1,13 @@ +from .balance import Balance +from .depth import Depth +from .depth import PriceVolume +from .error import ErrorMessage +from .event import Event +from .event import MarketDataEvent +from .event import UserDataEvent +from .kline import KLine +from .order import Order +from .submit_order import SubmitOrder +from .subscription import Subscription +from .ticker import Ticker +from .trade import Trade diff --git a/python/bbgo/data/balance.py b/python/bbgo/data/balance.py new file mode 100644 index 0000000000..e81b58b4c0 --- /dev/null +++ b/python/bbgo/data/balance.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from dataclasses import dataclass +from decimal import Decimal + +import bbgo_pb2 + +from ..utils import parse_number + + +@dataclass +class Balance: + exchange: str + currency: str + available: Decimal + locked: Decimal + borrowed: Decimal + + @classmethod + def from_pb(cls, obj: bbgo_pb2.Balance) -> Balance: + return cls( + exchange=obj.exchange, + currency=obj.currency, + available=parse_number(obj.available), + locked=parse_number(obj.locked), + borrowed=parse_number(obj.borrowed), + ) + + def total(self) -> Decimal: + return self.available + self.locked diff --git a/python/bbgo/data/depth.py b/python/bbgo/data/depth.py new file mode 100644 index 0000000000..55d9c26f85 --- /dev/null +++ b/python/bbgo/data/depth.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from dataclasses import dataclass +from decimal import Decimal +from typing import List + +import bbgo_pb2 + +from ..utils import parse_number + + +@dataclass +class Depth: + exchange: str + symbol: str + asks: List[PriceVolume] + bids: List[PriceVolume] + + @classmethod + def from_pb(cls, obj: bbgo_pb2.Depth): + return cls( + exchange=obj.exchange, + symbol=obj.symbol, + asks=[PriceVolume.from_pb(ask) for ask in obj.asks], + 
bids=[PriceVolume.from_pb(bid) for bid in obj.bids], + ) + + +@dataclass +class PriceVolume: + price: Decimal + volume: Decimal + + @classmethod + def from_pb(cls, obj: bbgo_pb2.PriceVolume): + return cls( + price=parse_number(obj.price), + volume=parse_number(obj.volume), + ) diff --git a/python/bbgo/data/error.py b/python/bbgo/data/error.py new file mode 100644 index 0000000000..5b713b8ed9 --- /dev/null +++ b/python/bbgo/data/error.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from dataclasses import dataclass + +import bbgo_pb2 + + +@dataclass +class ErrorMessage: + code: int + message: str + + @classmethod + def from_pb(cls, obj: bbgo_pb2.Error) -> ErrorMessage: + return cls( + code=obj.error_code, + message=obj.error_message, + ) diff --git a/python/bbgo/data/event.py b/python/bbgo/data/event.py new file mode 100644 index 0000000000..add2c555f1 --- /dev/null +++ b/python/bbgo/data/event.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime +from typing import List + +import bbgo_pb2 + +from ..enums import ChannelType +from ..enums import EventType +from ..utils import parse_time +from .balance import Balance +from .depth import Depth +from .error import ErrorMessage +from .kline import KLine +from .order import Order +from .ticker import Ticker +from .trade import Trade + + +@dataclass +class Event: + session: str + exchange: str + channel_type: ChannelType + event_type: EventType + + +@dataclass +class UserDataEvent(Event): + balances: List[Balance] = None + trades: List[Trade] = None + orders: List[Order] = None + + @classmethod + def from_pb(cls, obj: bbgo_pb2.UserData) -> UserDataEvent: + return cls( + session=obj.session, + exchange=obj.exchange, + channel_type=ChannelType(obj.channel), + event_type=EventType(obj.event), + balances=[Balance.from_pb(balance) for balance in obj.balances], + trades=[Trade.from_pb(trade) for trade in obj.trades], + orders=[Order.from_pb(order) for order in obj.orders], + ) + + +@dataclass +class MarketDataEvent(Event): + symbol: str + subscribed_at: datetime + error: ErrorMessage + depth: Depth = None + kline: KLine = None + ticker: Ticker = None + trades: List[Trade] = None + + @classmethod + def from_pb(cls, obj: bbgo_pb2.MarketData) -> MarketDataEvent: + channel_type = ChannelType(obj.channel) + + event = cls( + session=obj.session, + exchange=obj.exchange, + symbol=obj.symbol, + channel_type=channel_type, + event_type=EventType(obj.event), + subscribed_at=parse_time(obj.subscribed_at), + error=ErrorMessage.from_pb(obj.error), + ) + + if channel_type == ChannelType.BOOK: + event.depth = Depth.from_pb(obj.depth) + + if channel_type == ChannelType.KLINE: + event.kline = KLine.from_pb(obj.kline) + + if channel_type == ChannelType.TICKER: + event.ticker = Ticker.from_pb(obj.ticker) + + if channel_type == ChannelType.TRADE: + event.trades = [Trade.from_pb(trade) for trade in obj.trades] + + return event diff --git a/python/bbgo/data/kline.py b/python/bbgo/data/kline.py new file mode 100644 index 0000000000..42f3563d33 --- /dev/null +++ b/python/bbgo/data/kline.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime +from decimal import Decimal + +import bbgo_pb2 + +from ..utils import parse_number +from ..utils import parse_time + + +@dataclass +class KLine: + exchange: str + symbol: str + open: Decimal + high: Decimal + low: Decimal + close: Decimal + volume: Decimal + session: str = None + start_time: 
datetime = None + end_time: datetime = None + quote_volume: Decimal = None + closed: bool = None + + @classmethod + def from_pb(cls, obj: bbgo_pb2.KLine) -> KLine: + return cls( + exchange=obj.exchange, + symbol=obj.symbol, + open=parse_number(obj.open), + high=parse_number(obj.high), + low=parse_number(obj.low), + close=parse_number(obj.close), + volume=parse_number(obj.volume), + quote_volume=parse_number(obj.quote_volume), + start_time=parse_time(obj.start_time), + end_time=parse_time(obj.end_time), + closed=obj.closed, + ) diff --git a/python/bbgo/data/order.py b/python/bbgo/data/order.py new file mode 100644 index 0000000000..c9c61d1a82 --- /dev/null +++ b/python/bbgo/data/order.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime +from decimal import Decimal + +import bbgo_pb2 + +from ..enums import OrderType +from ..enums import SideType +from ..utils import parse_number +from ..utils import parse_time + + +@dataclass +class Order: + exchange: str + symbol: str + order_id: str + side: SideType + order_type: OrderType + price: Decimal + stop_price: Decimal + status: str + quantity: Decimal + executed_quantity: Decimal + client_order_id: str + group_id: int + created_at: datetime + + @classmethod + def from_pb(cls, obj: bbgo_pb2.Order) -> Order: + return cls( + exchange=obj.exchange, + symbol=obj.symbol, + order_id=obj.id, + side=SideType(obj.side), + order_type=OrderType(obj.order_type), + price=parse_number(obj.price), + stop_price=parse_number(obj.stop_price), + status=obj.status, + quantity=parse_number(obj.quantity), + executed_quantity=parse_number(obj.executed_quantity), + client_order_id=obj.client_order_id, + group_id=obj.group_id, + created_at=parse_time(obj.created_at), + ) diff --git a/python/bbgo/data/submit_order.py b/python/bbgo/data/submit_order.py new file mode 100644 index 0000000000..bc3572bbbf --- /dev/null +++ b/python/bbgo/data/submit_order.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from dataclasses import dataclass +from decimal import Decimal + +import bbgo_pb2 + +from ..enums import OrderType +from ..enums import SideType + + +@dataclass +class SubmitOrder: + session: str + exchange: str + symbol: str + side: SideType + quantity: Decimal + order_type: OrderType + price: Decimal = None + stop_price: Decimal = None + client_order_id: str = None + group_id: int = None + + def to_pb(self) -> bbgo_pb2.SubmitOrder: + return bbgo_pb2.SubmitOrder( + session=self.session, + exchange=self.exchange, + symbol=self.symbol, + side=self.side.value, + price=str(self.price or ""), + quantity=str(self.quantity or ""), + stop_price=str(self.stop_price or ""), + order_type=self.order_type.value, + client_order_id=self.client_order_id or "", + group_id=self.group_id or 0, + ) diff --git a/python/bbgo/data/subscription.py b/python/bbgo/data/subscription.py new file mode 100644 index 0000000000..03056129cf --- /dev/null +++ b/python/bbgo/data/subscription.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from dataclasses import dataclass + +import bbgo_pb2 + +from ..enums import ChannelType +from ..enums import DepthType + + +@dataclass +class Subscription: + exchange: str + channel: ChannelType + symbol: str + depth: DepthType = None + interval: str = None + + def to_pb(self) -> bbgo_pb2.Subscription: + subscription_pb = bbgo_pb2.Subscription( + exchange=self.exchange, + channel=self.channel.value, + symbol=self.symbol, + ) + + if self.depth is not None: + subscription_pb.depth = 
self.depth.value + + if self.interval is not None: + subscription_pb.interval = self.interval + + return subscription_pb diff --git a/python/bbgo/data/ticker.py b/python/bbgo/data/ticker.py new file mode 100644 index 0000000000..e809aa3c10 --- /dev/null +++ b/python/bbgo/data/ticker.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +from dataclasses import dataclass +from decimal import Decimal + +import bbgo_pb2 + +from ..utils import parse_number + + +@dataclass +class Ticker: + exchange: str + symbol: str + open: Decimal + high: Decimal + low: Decimal + close: Decimal + volume: Decimal + + @classmethod + def from_pb(cls, obj: bbgo_pb2.KLine) -> Ticker: + return cls( + exchange=obj.exchange, + symbol=obj.symbol, + open=parse_number(obj.open), + high=parse_number(obj.high), + low=parse_number(obj.low), + close=parse_number(obj.close), + volume=parse_number(obj.volume), + ) diff --git a/python/bbgo/data/trade.py b/python/bbgo/data/trade.py new file mode 100644 index 0000000000..9602225f30 --- /dev/null +++ b/python/bbgo/data/trade.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime +from decimal import Decimal + +import bbgo_pb2 + +from ..enums import SideType +from ..utils import parse_number +from ..utils import parse_time + + +@dataclass +class Trade: + session: str + exchange: str + symbol: str + trade_id: str + price: Decimal + quantity: Decimal + created_at: datetime + side: SideType + fee_currency: str + fee: Decimal + maker: bool + + @classmethod + def from_pb(cls, obj: bbgo_pb2.Trade) -> Trade: + return cls( + session=obj.session, + exchange=obj.exchange, + symbol=obj.symbol, + trade_id=obj.id, + price=parse_number(obj.price), + quantity=parse_number(obj.quantity), + created_at=parse_time(obj.created_at), + side=SideType(obj.side), + fee_currency=obj.fee_currency, + fee=parse_number(obj.fee), + maker=obj.maker, + ) diff --git a/python/bbgo/enums/__init__.py b/python/bbgo/enums/__init__.py new file mode 100644 index 0000000000..c3aa817ceb --- /dev/null +++ b/python/bbgo/enums/__init__.py @@ -0,0 +1,5 @@ +from .channel_type import ChannelType +from .depth_type import DepthType +from .event_type import EventType +from .order_type import OrderType +from .side_type import SideType diff --git a/python/bbgo/enums/channel_type.py b/python/bbgo/enums/channel_type.py new file mode 100644 index 0000000000..9954e04379 --- /dev/null +++ b/python/bbgo/enums/channel_type.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from enum import Enum + + +class ChannelType(Enum): + BOOK = 0 + TRADE = 1 + TICKER = 2 + KLINE = 3 + BALANCE = 4 + ORDER = 5 + + @classmethod + def from_str(cls, s: str) -> ChannelType: + return {t.name.lower(): t for t in cls}[s.lower()] diff --git a/python/bbgo/enums/depth_type.py b/python/bbgo/enums/depth_type.py new file mode 100644 index 0000000000..e022a8095f --- /dev/null +++ b/python/bbgo/enums/depth_type.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from enum import Enum + + +# string depth = 4; // depth is for book, valid values are full, medium, 1, 5 and 20 +class DepthType(Enum): + FULL = 'full' + MEDIUM = 'medium' + DEPTH_1 = '1' + DEPTH_5 = '5' + DEPTH_20 = '20' + + @classmethod + def from_str(cls, s: str) -> DepthType: + return {t.name.lower(): t for t in cls}[s.lower()] diff --git a/python/bbgo/enums/event_type.py b/python/bbgo/enums/event_type.py new file mode 100644 index 0000000000..bfa74fcd5c --- /dev/null +++ b/python/bbgo/enums/event_type.py @@ -0,0 +1,17 
@@ +from __future__ import annotations + +from enum import Enum + + +class EventType(Enum): + UNKNOWN = 0 + SUBSCRIBED = 1 + UNSUBSCRIBED = 2 + SNAPSHOT = 3 + UPDATE = 4 + AUTHENTICATED = 5 + ERROR = 99 + + @classmethod + def from_str(cls, s: str) -> EventType: + return {t.name.lower(): t for t in cls}[s.lower()] diff --git a/python/bbgo/enums/order_type.py b/python/bbgo/enums/order_type.py new file mode 100644 index 0000000000..c5ce444eba --- /dev/null +++ b/python/bbgo/enums/order_type.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from enum import Enum + + +class OrderType(Enum): + MARKET = 0 + LIMIT = 1 + STOP_MARKET = 2 + STOP_LIMIT = 3 + POST_ONLY = 4 + IOC_LIMIT = 5 + + @classmethod + def from_str(cls, s: str) -> OrderType: + return {t.name.lower(): t for t in cls}[s.lower()] diff --git a/python/bbgo/enums/side_type.py b/python/bbgo/enums/side_type.py new file mode 100644 index 0000000000..dea1c0e97d --- /dev/null +++ b/python/bbgo/enums/side_type.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from enum import Enum + + +class SideType(Enum): + BUY = 0 + SELL = 1 + + @classmethod + def from_str(cls, s: str) -> SideType: + return {t.name.lower(): t for t in cls}[s.lower()] diff --git a/python/bbgo/handlers/__init__.py b/python/bbgo/handlers/__init__.py new file mode 100644 index 0000000000..60d677bd0d --- /dev/null +++ b/python/bbgo/handlers/__init__.py @@ -0,0 +1,21 @@ +from .balance import BalanceHandler +from .balance import BalanceSnapshotHandler +from .balance import BalanceUpdateHandler +from .book import BookSnapshotHandler +from .book import BookUpdateHandler +from .error import ErrorHandler +from .handler import Handler +from .kline import KLineHandler +from .kline import KLineSnapshotHandler +from .kline import KLineUpdateHandler +from .order import OrderHandler +from .order import OrderSnapshotHandler +from .order import OrderUpdateHandler +from .snapshot import SnapshotHandler +from .ticker import TickerHandler +from .ticker import TickerSnapshotHandler +from .ticker import TickerUpdateHandler +from .trade import TradeHandler +from .trade import TradeSnapshotHandler +from .trade import TradeUpdateHandler +from .update import UpdateHandler diff --git a/python/bbgo/handlers/balance.py b/python/bbgo/handlers/balance.py new file mode 100644 index 0000000000..7413afb884 --- /dev/null +++ b/python/bbgo/handlers/balance.py @@ -0,0 +1,31 @@ +from ..data import UserDataEvent +from ..enums import ChannelType +from ..enums import EventType +from .handler import Handler + + +class BalanceHandler(Handler): + + def __call__(self, event: UserDataEvent) -> None: + if event.channel_type != ChannelType.BALANCE: + return + + super(BalanceHandler, self).__call__(event) + + +class BalanceSnapshotHandler(BalanceHandler): + + def __call__(self, event: UserDataEvent) -> None: + if event.event_type != EventType.SNAPSHOT: + return + + super(BalanceSnapshotHandler, self).__call__(event) + + +class BalanceUpdateHandler(BalanceHandler): + + def __call__(self, event: UserDataEvent) -> None: + if event.event_type != EventType.UPDATE: + return + + super(BalanceUpdateHandler, self).__call__(event) diff --git a/python/bbgo/handlers/book.py b/python/bbgo/handlers/book.py new file mode 100644 index 0000000000..ebffb9036b --- /dev/null +++ b/python/bbgo/handlers/book.py @@ -0,0 +1,31 @@ +from ..data import MarketDataEvent +from ..enums import ChannelType +from ..enums import EventType +from .handler import Handler + + +class BookHandler(Handler): + + def __call__(self, event: 
MarketDataEvent) -> None: + if event.channel_type != ChannelType.BOOK: + return + + super(BookHandler, self).__call__(event) + + +class BookSnapshotHandler(BookHandler): + + def __call__(self, event: MarketDataEvent) -> None: + if event.event_type != EventType.SNAPSHOT: + return + + super(BookSnapshotHandler, self).__call__(event) + + +class BookUpdateHandler(BookHandler): + + def __call__(self, event: MarketDataEvent) -> None: + if event.event_type != EventType.UPDATE: + return + + super(BookUpdateHandler, self).__call__(event) diff --git a/python/bbgo/handlers/error.py b/python/bbgo/handlers/error.py new file mode 100644 index 0000000000..be214894f2 --- /dev/null +++ b/python/bbgo/handlers/error.py @@ -0,0 +1,12 @@ +from ..data import Event +from ..enums import EventType +from .handler import Handler + + +class ErrorHandler(Handler): + + def __call__(self, event: Event) -> None: + if event.event_type != EventType.ERROR: + return + + super(ErrorHandler, self).__call__(event) diff --git a/python/bbgo/handlers/handler.py b/python/bbgo/handlers/handler.py new file mode 100644 index 0000000000..5213ff3c25 --- /dev/null +++ b/python/bbgo/handlers/handler.py @@ -0,0 +1,10 @@ +from ..data import Event + + +class Handler(object): + + def __call__(self, event: Event) -> None: + self.handle(event) + + def handle(self, event: Event) -> None: + raise NotImplementedError diff --git a/python/bbgo/handlers/kline.py b/python/bbgo/handlers/kline.py new file mode 100644 index 0000000000..ddea19d005 --- /dev/null +++ b/python/bbgo/handlers/kline.py @@ -0,0 +1,31 @@ +from ..data import MarketDataEvent +from ..enums import ChannelType +from ..enums import EventType +from .handler import Handler + + +class KLineHandler(Handler): + + def __call__(self, event: MarketDataEvent) -> None: + if event.channel_type != ChannelType.KLINE: + return + + super(KLineHandler, self).__call__(event) + + +class KLineSnapshotHandler(KLineHandler): + + def __call__(self, event: MarketDataEvent) -> None: + if event.event_type != EventType.SNAPSHOT: + return + + super(KLineSnapshotHandler, self).__call__(event) + + +class KLineUpdateHandler(KLineHandler): + + def __call__(self, event: MarketDataEvent) -> None: + if event.event_type != EventType.UPDATE: + return + + super(KLineUpdateHandler, self).__call__(event) diff --git a/python/bbgo/handlers/order.py b/python/bbgo/handlers/order.py new file mode 100644 index 0000000000..ccebc934aa --- /dev/null +++ b/python/bbgo/handlers/order.py @@ -0,0 +1,31 @@ +from ..data import UserDataEvent +from ..enums import ChannelType +from ..enums import EventType +from .handler import Handler + + +class OrderHandler(Handler): + + def __call__(self, event: UserDataEvent) -> None: + if event.channel_type != ChannelType.ORDER: + return + + super(OrderHandler, self).__call__(event) + + +class OrderSnapshotHandler(OrderHandler): + + def __call__(self, event: UserDataEvent) -> None: + if event.event_type != EventType.SNAPSHOT: + return + + super(OrderSnapshotHandler, self).__call__(event) + + +class OrderUpdateHandler(OrderHandler): + + def __call__(self, event: UserDataEvent) -> None: + if event.event_type != EventType.UPDATE: + return + + super(OrderUpdateHandler, self).__call__(event) diff --git a/python/bbgo/handlers/snapshot.py b/python/bbgo/handlers/snapshot.py new file mode 100644 index 0000000000..8e61835c4b --- /dev/null +++ b/python/bbgo/handlers/snapshot.py @@ -0,0 +1,12 @@ +from ..data import Event +from ..enums import EventType +from .handler import Handler + + +class 
SnapshotHandler(Handler):
+
+    def __call__(self, event: Event) -> None:
+        if event.event_type != EventType.SNAPSHOT:
+            return
+
+        super(SnapshotHandler, self).__call__(event)
diff --git a/python/bbgo/handlers/ticker.py b/python/bbgo/handlers/ticker.py
new file mode 100644
index 0000000000..9e3d8458fc
--- /dev/null
+++ b/python/bbgo/handlers/ticker.py
@@ -0,0 +1,31 @@
+from ..data import MarketDataEvent
+from ..enums import ChannelType
+from ..enums import EventType
+from .handler import Handler
+
+
+class TickerHandler(Handler):
+
+    def __call__(self, event: MarketDataEvent) -> None:
+        if event.channel_type != ChannelType.TICKER:
+            return
+
+        super(TickerHandler, self).__call__(event)
+
+
+class TickerSnapshotHandler(TickerHandler):
+
+    def __call__(self, event: MarketDataEvent) -> None:
+        if event.event_type != EventType.SNAPSHOT:
+            return
+
+        super(TickerSnapshotHandler, self).__call__(event)
+
+
+class TickerUpdateHandler(TickerHandler):
+
+    def __call__(self, event: MarketDataEvent) -> None:
+        if event.event_type != EventType.UPDATE:
+            return
+
+        super(TickerUpdateHandler, self).__call__(event)
diff --git a/python/bbgo/handlers/trade.py b/python/bbgo/handlers/trade.py
new file mode 100644
index 0000000000..41faf0aa80
--- /dev/null
+++ b/python/bbgo/handlers/trade.py
@@ -0,0 +1,31 @@
+from ..data import Event
+from ..enums import ChannelType
+from ..enums import EventType
+from .handler import Handler
+
+
+class TradeHandler(Handler):
+
+    def __call__(self, event: Event) -> None:
+        if event.channel_type != ChannelType.TRADE:
+            return
+
+        super(TradeHandler, self).__call__(event)
+
+
+class TradeSnapshotHandler(TradeHandler):
+
+    def __call__(self, event: Event) -> None:
+        if event.event_type != EventType.SNAPSHOT:
+            return
+
+        super(TradeSnapshotHandler, self).__call__(event)
+
+
+class TradeUpdateHandler(TradeHandler):
+
+    def __call__(self, event: Event) -> None:
+        if event.event_type != EventType.UPDATE:
+            return
+
+        super(TradeUpdateHandler, self).__call__(event)
diff --git a/python/bbgo/handlers/update.py b/python/bbgo/handlers/update.py
new file mode 100644
index 0000000000..34b4846830
--- /dev/null
+++ b/python/bbgo/handlers/update.py
@@ -0,0 +1,12 @@
+from ..data import Event
+from ..enums import EventType
+from .handler import Handler
+
+
+class UpdateHandler(Handler):
+
+    def __call__(self, event: Event) -> None:
+        if event.event_type != EventType.UPDATE:
+            return
+
+        super(UpdateHandler, self).__call__(event)
diff --git a/python/bbgo/services.py b/python/bbgo/services.py
new file mode 100644
index 0000000000..48aa4a70c2
--- /dev/null
+++ b/python/bbgo/services.py
@@ -0,0 +1,179 @@
+from __future__ import annotations
+
+from typing import Iterator
+from typing import List
+
+from loguru import logger
+
+import bbgo_pb2
+import bbgo_pb2_grpc
+
+from .data import ErrorMessage
+from .data import KLine
+from .data import MarketDataEvent
+from .data import Order
+from .data import SubmitOrder
+from .data import Subscription
+from .data import UserDataEvent
+from .enums import OrderType
+from .enums import SideType
+from .utils import get_insecure_channel
+
+
+class UserDataService(object):
+    stub: bbgo_pb2_grpc.UserDataServiceStub
+
+    def __init__(self, host: str, port: int) -> None:
+        self.stub = bbgo_pb2_grpc.UserDataServiceStub(get_insecure_channel(host, port))
+
+    def subscribe(self, session: str) -> Iterator[UserDataEvent]:
+        request = bbgo_pb2.UserDataRequest(session=session)
+        response_iter = self.stub.Subscribe(request)
+
+        for response in response_iter:
+            yield UserDataEvent.from_pb(response)
+
+
+class MarketService(object):
+    stub: bbgo_pb2_grpc.MarketDataServiceStub
+
+    def __init__(self, host: str, port: int) -> None:
+        self.stub = bbgo_pb2_grpc.MarketDataServiceStub(get_insecure_channel(host, port))
+
+    def subscribe(self, subscriptions: List[Subscription]) -> Iterator[MarketDataEvent]:
+        request = bbgo_pb2.SubscribeRequest(subscriptions=[s.to_pb() for s in subscriptions])
+        response_iter = self.stub.Subscribe(request)
+
+        for response in response_iter:
+            yield MarketDataEvent.from_pb(response)
+
+    def query_klines(self,
+                     exchange: str,
+                     symbol: str,
+                     limit: int = 30,
+                     interval: str = '1m',
+                     start_time: int = None,
+                     end_time: int = None) -> List[KLine]:
+        request = bbgo_pb2.QueryKLinesRequest(exchange=exchange,
+                                              symbol=symbol,
+                                              limit=limit,
+                                              interval=interval,
+                                              start_time=start_time,
+                                              end_time=end_time)
+
+        response = self.stub.QueryKLines(request)
+
+        klines = []
+        for kline in response.klines:
+            klines.append(KLine.from_pb(kline))
+
+        error = ErrorMessage.from_pb(response.error)
+        if error.code != 0:
+            logger.error(error.message)
+
+        return klines
+
+
+class TradingService(object):
+    stub: bbgo_pb2_grpc.TradingServiceStub
+
+    def __init__(self, host: str, port: int) -> None:
+        self.stub = bbgo_pb2_grpc.TradingServiceStub(get_insecure_channel(host, port))
+
+    def submit_order(self,
+                     session: str,
+                     exchange: str,
+                     symbol: str,
+                     side: str,
+                     quantity: float,
+                     order_type: str,
+                     price: float = None,
+                     stop_price: float = None,
+                     client_order_id: str = None,
+                     group_id: int = None) -> Order:
+        submit_order = SubmitOrder(session=session,
+                                   exchange=exchange,
+                                   symbol=symbol,
+                                   side=SideType.from_str(side),
+                                   quantity=quantity,
+                                   order_type=OrderType.from_str(order_type),
+                                   price=price,
+                                   stop_price=stop_price,
+                                   client_order_id=client_order_id,
+                                   group_id=group_id)
+
+        request = bbgo_pb2.SubmitOrderRequest(session=session, submit_orders=[submit_order.to_pb()])
+        response = self.stub.SubmitOrder(request)
+
+        order = Order.from_pb(response.orders[0])
+        error = ErrorMessage.from_pb(response.error)
+        if error.code != 0:
+            logger.error(error.message)
+
+        return order
+
+    def cancel_order(self, session: str, order_id: int = None, client_order_id: int = None) -> Order:
+        request = bbgo_pb2.CancelOrderRequest(
+            session=session,
+            order_id=order_id or "",
+            client_order_id=client_order_id or "",
+        )
+        response = self.stub.CancelOrder(request)
+
+        order = Order.from_pb(response.order)
+        error = ErrorMessage.from_pb(response.error)
+        if error.code != 0:
+            logger.error(error.message)
+
+        return order
+
+    def query_order(self, order_id: int = None, client_order_id: int = None) -> bbgo_pb2.QueryOrderResponse:
+        request = bbgo_pb2.QueryOrderRequest(id=order_id, client_order_id=client_order_id)
+        response = self.stub.QueryOrder(request)
+        return response
+
+    def query_orders(self,
+                     exchange: str,
+                     symbol: str,
+                     states: List[str] = None,
+                     order_by: str = 'asc',
+                     group_id: int = None,
+                     pagination: bool = True,
+                     page: int = 0,
+                     limit: int = 100,
+                     offset: int = 0) -> bbgo_pb2.QueryOrdersResponse:
+        # set default value to ['wait', 'convert']
+        states = states or ['wait', 'convert']
+        request = bbgo_pb2.QueryOrdersRequest(exchange=exchange,
+                                              symbol=symbol,
+                                              states=states,
+                                              order_by=order_by,
+                                              group_id=group_id,
+                                              pagination=pagination,
+                                              page=page,
+                                              limit=limit,
+                                              offset=offset)
+
+        response = self.stub.QueryOrders(request)
+        return response
+
+    def query_trades(self,
+                     exchange: str,
+                     symbol: str,
+                     timestamp: int,
+                     order_by: str = 'asc',
pagination: bool = True, + page: int = 1, + limit: int = 100, + offset: int = 0) -> bbgo_pb2.QueryTradesResponse: + + request = bbgo_pb2.QueryTradesRequest(exchange=exchange, + symbol=symbol, + timestamp=timestamp, + order_by=order_by, + pagination=pagination, + page=page, + limit=limit, + offset=offset) + response = self.stub.QueryTrades(request) + return response diff --git a/python/bbgo/stream.py b/python/bbgo/stream.py new file mode 100644 index 0000000000..f87d1cdbd3 --- /dev/null +++ b/python/bbgo/stream.py @@ -0,0 +1,80 @@ +import asyncio +from typing import Callable +from typing import List + +import grpc + +import bbgo_pb2 +import bbgo_pb2_grpc +from bbgo.enums import ChannelType +from bbgo.enums import DepthType + +from .data import Event +from .data import MarketDataEvent +from .data import Subscription +from .data import UserDataEvent + + +class Stream(object): + subscriptions: List[Subscription] + + def __init__(self, host: str, port: int): + self.host = host + self.port = port + + self.subscriptions = [] + self.sessions = [] + self.event_handlers = [] + + def subscribe(self, exchange: str, channel: str, symbol: str, depth: str = None, interval: str = None): + subscription = Subscription(exchange=exchange, channel=ChannelType.from_str(channel), symbol=symbol) + + if depth is not None: + subscription.depth = DepthType(depth) + + if interval is not None: + subscription.interval = interval + + self.subscriptions.append(subscription) + + def subscribe_user_data(self, session: str): + self.sessions.append(session) + + def add_event_handler(self, event_handler: Callable) -> None: + self.event_handlers.append(event_handler) + + def fire_event_handlers(self, event: Event) -> None: + for event_handler in self.event_handlers: + event_handler(event) + + @property + def address(self): + return f'{self.host}:{self.port}' + + async def _subscribe_market_data(self): + async with grpc.aio.insecure_channel(self.address) as channel: + stub = bbgo_pb2_grpc.MarketDataServiceStub(channel) + + request = bbgo_pb2.SubscribeRequest(subscriptions=[s.to_pb() for s in self.subscriptions]) + async for response in stub.Subscribe(request): + event = MarketDataEvent.from_pb(response) + self.fire_event_handlers(event) + + async def _subscribe_user_data(self, session: str): + async with grpc.aio.insecure_channel(self.address) as channel: + stub = bbgo_pb2_grpc.UserDataServiceStub(channel) + + request = bbgo_pb2.UserDataRequest(session=session) + async for response in stub.Subscribe(request): + event = UserDataEvent.from_pb(response) + self.fire_event_handlers(event) + + def start(self): + coroutines = [self._subscribe_market_data()] + for session in self.sessions: + coroutines.append(self._subscribe_user_data(session)) + + group = asyncio.gather(*coroutines) + loop = asyncio.get_event_loop() + loop.run_until_complete(group) + loop.close() diff --git a/python/bbgo/utils/__init__.py b/python/bbgo/utils/__init__.py new file mode 100644 index 0000000000..ff84a50651 --- /dev/null +++ b/python/bbgo/utils/__init__.py @@ -0,0 +1,7 @@ +from .convert import parse_number +from .convert import parse_time +from .grpc_utils import get_credentials_from_env +from .grpc_utils import get_grpc_cert_file_from_env +from .grpc_utils import get_grpc_key_file_from_env +from .grpc_utils import get_insecure_channel +from .grpc_utils import get_insecure_channel_from_env diff --git a/python/bbgo/utils/convert.py b/python/bbgo/utils/convert.py new file mode 100644 index 0000000000..60e35a9c7e --- /dev/null +++ 
b/python/bbgo/utils/convert.py
@@ -0,0 +1,20 @@
+from datetime import datetime
+from decimal import Decimal
+from typing import Union
+
+
+def parse_number(s: Union[str, float]) -> Decimal:
+    if s is None:
+        return Decimal(0)
+
+    if s == "":
+        return Decimal(0)
+
+    return Decimal(s)
+
+
+def parse_time(t: Union[str, int]) -> datetime:
+    if isinstance(t, str):
+        t = int(t)
+
+    return datetime.fromtimestamp(t / 1000)
diff --git a/python/bbgo/utils/grpc_utils.py b/python/bbgo/utils/grpc_utils.py
new file mode 100644
index 0000000000..ffdb3049ce
--- /dev/null
+++ b/python/bbgo/utils/grpc_utils.py
@@ -0,0 +1,43 @@
+import os
+
+import grpc
+
+
+def read_binary(f):
+    with open(f, 'rb') as fp:
+        return fp.read()
+
+
+def get_grpc_cert_file_from_env():
+    cert_file = os.environ.get('BBGO_GRPC_CERT_FILE')
+    return cert_file
+
+
+def get_grpc_key_file_from_env():
+    key_file = os.environ.get('BBGO_GRPC_KEY_FILE')
+    return key_file
+
+
+def get_credentials_from_env():
+    key_file = get_grpc_key_file_from_env()
+    private_key = read_binary(key_file)
+    cert_file = get_grpc_cert_file_from_env()
+    certificate_chain = read_binary(cert_file)
+
+    private_key_certificate_chain_pairs = [(private_key, certificate_chain)]
+    server_credentials = grpc.ssl_server_credentials(private_key_certificate_chain_pairs)
+    return server_credentials
+
+
+def get_insecure_channel(host: str, port: int) -> grpc.Channel:
+    address = f'{host}:{port}'
+    return grpc.insecure_channel(address)
+
+
+def get_insecure_channel_from_env() -> grpc.Channel:
+    host = os.environ.get('BBGO_GRPC_HOST') or '127.0.0.1'
+    port = os.environ.get('BBGO_GRPC_PORT') or 50051
+
+    channel = get_insecure_channel(host, port)
+
+    return channel
diff --git a/python/bbgo_pb2.py b/python/bbgo_pb2.py
new file mode 100644
index 0000000000..c6d0b91807
--- /dev/null
+++ b/python/bbgo_pb2.py
@@ -0,0 +1,342 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: bbgo.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nbbgo.proto\x12\x04\x62\x62go\"\x07\n\x05\x45mpty\"2\n\x05\x45rror\x12\x12\n\nerror_code\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t\"\"\n\x0fUserDataRequest\x12\x0f\n\x07session\x18\x01 \x01(\t\"\xc4\x01\n\x08UserData\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x10\n\x08\x65xchange\x18\x02 \x01(\t\x12\x1e\n\x07\x63hannel\x18\x03 \x01(\x0e\x32\r.bbgo.Channel\x12\x1a\n\x05\x65vent\x18\x04 \x01(\x0e\x32\x0b.bbgo.Event\x12\x1f\n\x08\x62\x61lances\x18\x05 \x03(\x0b\x32\r.bbgo.Balance\x12\x1b\n\x06trades\x18\x06 \x03(\x0b\x32\x0b.bbgo.Trade\x12\x1b\n\x06orders\x18\x07 \x03(\x0b\x32\x0b.bbgo.Order\"=\n\x10SubscribeRequest\x12)\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x12.bbgo.Subscription\"q\n\x0cSubscription\x12\x10\n\x08\x65xchange\x18\x01 \x01(\t\x12\x1e\n\x07\x63hannel\x18\x02 \x01(\x0e\x32\r.bbgo.Channel\x12\x0e\n\x06symbol\x18\x03 \x01(\t\x12\r\n\x05\x64\x65pth\x18\x04 \x01(\t\x12\x10\n\x08interval\x18\x05 \x01(\t\"\xa1\x02\n\nMarketData\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x10\n\x08\x65xchange\x18\x02 \x01(\t\x12\x0e\n\x06symbol\x18\x03 \x01(\t\x12\x1e\n\x07\x63hannel\x18\x04 \x01(\x0e\x32\r.bbgo.Channel\x12\x1a\n\x05\x65vent\x18\x05 \x01(\x0e\x32\x0b.bbgo.Event\x12\x1a\n\x05\x64\x65pth\x18\x06 \x01(\x0b\x32\x0b.bbgo.Depth\x12\x1a\n\x05kline\x18\x07 \x01(\x0b\x32\x0b.bbgo.KLine\x12\x1c\n\x06ticker\x18\t \x01(\x0b\x32\x0c.bbgo.Ticker\x12\x1b\n\x06trades\x18\x08 \x03(\x0b\x32\x0b.bbgo.Trade\x12\x15\n\rsubscribed_at\x18\x0c \x01(\x03\x12\x1a\n\x05\x65rror\x18\r \x01(\x0b\x32\x0b.bbgo.Error\"k\n\x05\x44\x65pth\x12\x10\n\x08\x65xchange\x18\x01 \x01(\t\x12\x0e\n\x06symbol\x18\x02 \x01(\t\x12\x1f\n\x04\x61sks\x18\x03 \x03(\x0b\x32\x11.bbgo.PriceVolume\x12\x1f\n\x04\x62ids\x18\x04 \x03(\x0b\x32\x11.bbgo.PriceVolume\",\n\x0bPriceVolume\x12\r\n\x05price\x18\x01 \x01(\t\x12\x0e\n\x06volume\x18\x02 \x01(\t\"\xc7\x01\n\x05Trade\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x10\n\x08\x65xchange\x18\x02 \x01(\t\x12\x0e\n\x06symbol\x18\x03 \x01(\t\x12\n\n\x02id\x18\x04 \x01(\t\x12\r\n\x05price\x18\x05 \x01(\t\x12\x10\n\x08quantity\x18\x06 \x01(\t\x12\x12\n\ncreated_at\x18\x07 \x01(\x03\x12\x18\n\x04side\x18\x08 \x01(\x0e\x32\n.bbgo.Side\x12\x14\n\x0c\x66\x65\x65_currency\x18\t \x01(\t\x12\x0b\n\x03\x66\x65\x65\x18\n \x01(\t\x12\r\n\x05maker\x18\x0b \x01(\x08\"r\n\x06Ticker\x12\x10\n\x08\x65xchange\x18\x01 \x01(\t\x12\x0e\n\x06symbol\x18\x02 \x01(\t\x12\x0c\n\x04open\x18\x03 \x01(\x01\x12\x0c\n\x04high\x18\x04 \x01(\x01\x12\x0b\n\x03low\x18\x05 \x01(\x01\x12\r\n\x05\x63lose\x18\x06 \x01(\x01\x12\x0e\n\x06volume\x18\x07 \x01(\x01\"\x93\x02\n\x05Order\x12\x10\n\x08\x65xchange\x18\x01 \x01(\t\x12\x0e\n\x06symbol\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\t\x12\x18\n\x04side\x18\x04 \x01(\x0e\x32\n.bbgo.Side\x12#\n\norder_type\x18\x05 \x01(\x0e\x32\x0f.bbgo.OrderType\x12\r\n\x05price\x18\x06 \x01(\t\x12\x12\n\nstop_price\x18\x07 \x01(\t\x12\x0e\n\x06status\x18\t \x01(\t\x12\x10\n\x08quantity\x18\x0b \x01(\t\x12\x19\n\x11\x65xecuted_quantity\x18\x0c 
\x01(\t\x12\x17\n\x0f\x63lient_order_id\x18\x0e \x01(\t\x12\x10\n\x08group_id\x18\x0f \x01(\x03\x12\x12\n\ncreated_at\x18\n \x01(\x03\"\xdf\x01\n\x0bSubmitOrder\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x10\n\x08\x65xchange\x18\x02 \x01(\t\x12\x0e\n\x06symbol\x18\x03 \x01(\t\x12\x18\n\x04side\x18\x04 \x01(\x0e\x32\n.bbgo.Side\x12\r\n\x05price\x18\x06 \x01(\t\x12\x10\n\x08quantity\x18\x05 \x01(\t\x12\x12\n\nstop_price\x18\x07 \x01(\t\x12#\n\norder_type\x18\x08 \x01(\x0e\x32\x0f.bbgo.OrderType\x12\x17\n\x0f\x63lient_order_id\x18\t \x01(\t\x12\x10\n\x08group_id\x18\n \x01(\x03\"s\n\x07\x42\x61lance\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x10\n\x08\x65xchange\x18\x02 \x01(\t\x12\x10\n\x08\x63urrency\x18\x03 \x01(\t\x12\x11\n\tavailable\x18\x04 \x01(\t\x12\x0e\n\x06locked\x18\x05 \x01(\t\x12\x10\n\x08\x62orrowed\x18\x06 \x01(\t\"O\n\x12SubmitOrderRequest\x12\x0f\n\x07session\x18\x01 \x01(\t\x12(\n\rsubmit_orders\x18\x02 \x03(\x0b\x32\x11.bbgo.SubmitOrder\"_\n\x13SubmitOrderResponse\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x1b\n\x06orders\x18\x02 \x03(\x0b\x32\x0b.bbgo.Order\x12\x1a\n\x05\x65rror\x18\x03 \x01(\x0b\x32\x0b.bbgo.Error\"P\n\x12\x43\x61ncelOrderRequest\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x10\n\x08order_id\x18\x02 \x01(\t\x12\x17\n\x0f\x63lient_order_id\x18\x03 \x01(\t\"M\n\x13\x43\x61ncelOrderResponse\x12\x1a\n\x05order\x18\x01 \x01(\x0b\x32\x0b.bbgo.Order\x12\x1a\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x0b.bbgo.Error\"I\n\x11QueryOrderRequest\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\x12\x17\n\x0f\x63lient_order_id\x18\x03 \x01(\t\"L\n\x12QueryOrderResponse\x12\x1a\n\x05order\x18\x01 \x01(\x0b\x32\x0b.bbgo.Order\x12\x1a\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x0b.bbgo.Error\"\xa9\x01\n\x12QueryOrdersRequest\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x0e\n\x06symbol\x18\x02 \x01(\t\x12\r\n\x05state\x18\x03 \x03(\t\x12\x10\n\x08order_by\x18\x04 \x01(\t\x12\x10\n\x08group_id\x18\x05 \x01(\x03\x12\x12\n\npagination\x18\x06 \x01(\x08\x12\x0c\n\x04page\x18\x07 \x01(\x03\x12\r\n\x05limit\x18\x08 \x01(\x03\x12\x0e\n\x06offset\x18\t \x01(\x03\"N\n\x13QueryOrdersResponse\x12\x1b\n\x06orders\x18\x01 \x03(\x0b\x32\x0b.bbgo.Order\x12\x1a\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x0b.bbgo.Error\"\xb6\x01\n\x12QueryTradesRequest\x12\x10\n\x08\x65xchange\x18\x01 \x01(\t\x12\x0e\n\x06symbol\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\x03\x12\x0c\n\x04\x66rom\x18\x04 \x01(\x03\x12\n\n\x02to\x18\x05 \x01(\x03\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x12\n\npagination\x18\x07 \x01(\x08\x12\x0c\n\x04page\x18\x08 \x01(\x03\x12\r\n\x05limit\x18\t \x01(\x03\x12\x0e\n\x06offset\x18\n \x01(\x03\"N\n\x13QueryTradesResponse\x12\x1b\n\x06trades\x18\x01 \x03(\x0b\x32\x0b.bbgo.Trade\x12\x1a\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x0b.bbgo.Error\"}\n\x12QueryKLinesRequest\x12\x10\n\x08\x65xchange\x18\x01 \x01(\t\x12\x0e\n\x06symbol\x18\x02 \x01(\t\x12\x10\n\x08interval\x18\x03 \x01(\t\x12\x12\n\nstart_time\x18\x04 \x01(\x03\x12\x10\n\x08\x65nd_time\x18\x05 \x01(\x03\x12\r\n\x05limit\x18\x06 \x01(\x03\"N\n\x13QueryKLinesResponse\x12\x1b\n\x06klines\x18\x01 \x03(\x0b\x32\x0b.bbgo.KLine\x12\x1a\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x0b.bbgo.Error\"\xce\x01\n\x05KLine\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x10\n\x08\x65xchange\x18\x02 \x01(\t\x12\x0e\n\x06symbol\x18\x03 \x01(\t\x12\x0c\n\x04open\x18\x04 \x01(\t\x12\x0c\n\x04high\x18\x05 \x01(\t\x12\x0b\n\x03low\x18\x06 \x01(\t\x12\r\n\x05\x63lose\x18\x07 \x01(\t\x12\x0e\n\x06volume\x18\x08 \x01(\t\x12\x14\n\x0cquote_volume\x18\t 
\x01(\t\x12\x12\n\nstart_time\x18\n \x01(\x03\x12\x10\n\x08\x65nd_time\x18\x0b \x01(\x03\x12\x0e\n\x06\x63losed\x18\x0c \x01(\x08*n\n\x05\x45vent\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0e\n\nSUBSCRIBED\x10\x01\x12\x10\n\x0cUNSUBSCRIBED\x10\x02\x12\x0c\n\x08SNAPSHOT\x10\x03\x12\n\n\x06UPDATE\x10\x04\x12\x11\n\rAUTHENTICATED\x10\x05\x12\t\n\x05\x45RROR\x10\x63*M\n\x07\x43hannel\x12\x08\n\x04\x42OOK\x10\x00\x12\t\n\x05TRADE\x10\x01\x12\n\n\x06TICKER\x10\x02\x12\t\n\x05KLINE\x10\x03\x12\x0b\n\x07\x42\x41LANCE\x10\x04\x12\t\n\x05ORDER\x10\x05*\x19\n\x04Side\x12\x07\n\x03\x42UY\x10\x00\x12\x08\n\x04SELL\x10\x01*a\n\tOrderType\x12\n\n\x06MARKET\x10\x00\x12\t\n\x05LIMIT\x10\x01\x12\x0f\n\x0bSTOP_MARKET\x10\x02\x12\x0e\n\nSTOP_LIMIT\x10\x03\x12\r\n\tPOST_ONLY\x10\x04\x12\r\n\tIOC_LIMIT\x10\x05\x32\x94\x01\n\x11MarketDataService\x12\x39\n\tSubscribe\x12\x16.bbgo.SubscribeRequest\x1a\x10.bbgo.MarketData\"\x00\x30\x01\x12\x44\n\x0bQueryKLines\x12\x18.bbgo.QueryKLinesRequest\x1a\x19.bbgo.QueryKLinesResponse\"\x00\x32I\n\x0fUserDataService\x12\x36\n\tSubscribe\x12\x15.bbgo.UserDataRequest\x1a\x0e.bbgo.UserData\"\x00\x30\x01\x32\xeb\x02\n\x0eTradingService\x12\x44\n\x0bSubmitOrder\x12\x18.bbgo.SubmitOrderRequest\x1a\x19.bbgo.SubmitOrderResponse\"\x00\x12\x44\n\x0b\x43\x61ncelOrder\x12\x18.bbgo.CancelOrderRequest\x1a\x19.bbgo.CancelOrderResponse\"\x00\x12\x41\n\nQueryOrder\x12\x17.bbgo.QueryOrderRequest\x1a\x18.bbgo.QueryOrderResponse\"\x00\x12\x44\n\x0bQueryOrders\x12\x18.bbgo.QueryOrdersRequest\x1a\x19.bbgo.QueryOrdersResponse\"\x00\x12\x44\n\x0bQueryTrades\x12\x18.bbgo.QueryTradesRequest\x1a\x19.bbgo.QueryTradesResponse\"\x00\x42\x07Z\x05../pbb\x06proto3') + +_EVENT = DESCRIPTOR.enum_types_by_name['Event'] +Event = enum_type_wrapper.EnumTypeWrapper(_EVENT) +_CHANNEL = DESCRIPTOR.enum_types_by_name['Channel'] +Channel = enum_type_wrapper.EnumTypeWrapper(_CHANNEL) +_SIDE = DESCRIPTOR.enum_types_by_name['Side'] +Side = enum_type_wrapper.EnumTypeWrapper(_SIDE) +_ORDERTYPE = DESCRIPTOR.enum_types_by_name['OrderType'] +OrderType = enum_type_wrapper.EnumTypeWrapper(_ORDERTYPE) +UNKNOWN = 0 +SUBSCRIBED = 1 +UNSUBSCRIBED = 2 +SNAPSHOT = 3 +UPDATE = 4 +AUTHENTICATED = 5 +ERROR = 99 +BOOK = 0 +TRADE = 1 +TICKER = 2 +KLINE = 3 +BALANCE = 4 +ORDER = 5 +BUY = 0 +SELL = 1 +MARKET = 0 +LIMIT = 1 +STOP_MARKET = 2 +STOP_LIMIT = 3 +POST_ONLY = 4 +IOC_LIMIT = 5 + + +_EMPTY = DESCRIPTOR.message_types_by_name['Empty'] +_ERROR = DESCRIPTOR.message_types_by_name['Error'] +_USERDATAREQUEST = DESCRIPTOR.message_types_by_name['UserDataRequest'] +_USERDATA = DESCRIPTOR.message_types_by_name['UserData'] +_SUBSCRIBEREQUEST = DESCRIPTOR.message_types_by_name['SubscribeRequest'] +_SUBSCRIPTION = DESCRIPTOR.message_types_by_name['Subscription'] +_MARKETDATA = DESCRIPTOR.message_types_by_name['MarketData'] +_DEPTH = DESCRIPTOR.message_types_by_name['Depth'] +_PRICEVOLUME = DESCRIPTOR.message_types_by_name['PriceVolume'] +_TRADE = DESCRIPTOR.message_types_by_name['Trade'] +_TICKER = DESCRIPTOR.message_types_by_name['Ticker'] +_ORDER = DESCRIPTOR.message_types_by_name['Order'] +_SUBMITORDER = DESCRIPTOR.message_types_by_name['SubmitOrder'] +_BALANCE = DESCRIPTOR.message_types_by_name['Balance'] +_SUBMITORDERREQUEST = DESCRIPTOR.message_types_by_name['SubmitOrderRequest'] +_SUBMITORDERRESPONSE = DESCRIPTOR.message_types_by_name['SubmitOrderResponse'] +_CANCELORDERREQUEST = DESCRIPTOR.message_types_by_name['CancelOrderRequest'] +_CANCELORDERRESPONSE = DESCRIPTOR.message_types_by_name['CancelOrderResponse'] +_QUERYORDERREQUEST = 
DESCRIPTOR.message_types_by_name['QueryOrderRequest'] +_QUERYORDERRESPONSE = DESCRIPTOR.message_types_by_name['QueryOrderResponse'] +_QUERYORDERSREQUEST = DESCRIPTOR.message_types_by_name['QueryOrdersRequest'] +_QUERYORDERSRESPONSE = DESCRIPTOR.message_types_by_name['QueryOrdersResponse'] +_QUERYTRADESREQUEST = DESCRIPTOR.message_types_by_name['QueryTradesRequest'] +_QUERYTRADESRESPONSE = DESCRIPTOR.message_types_by_name['QueryTradesResponse'] +_QUERYKLINESREQUEST = DESCRIPTOR.message_types_by_name['QueryKLinesRequest'] +_QUERYKLINESRESPONSE = DESCRIPTOR.message_types_by_name['QueryKLinesResponse'] +_KLINE = DESCRIPTOR.message_types_by_name['KLine'] +Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), { + 'DESCRIPTOR' : _EMPTY, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.Empty) + }) +_sym_db.RegisterMessage(Empty) + +Error = _reflection.GeneratedProtocolMessageType('Error', (_message.Message,), { + 'DESCRIPTOR' : _ERROR, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.Error) + }) +_sym_db.RegisterMessage(Error) + +UserDataRequest = _reflection.GeneratedProtocolMessageType('UserDataRequest', (_message.Message,), { + 'DESCRIPTOR' : _USERDATAREQUEST, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.UserDataRequest) + }) +_sym_db.RegisterMessage(UserDataRequest) + +UserData = _reflection.GeneratedProtocolMessageType('UserData', (_message.Message,), { + 'DESCRIPTOR' : _USERDATA, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.UserData) + }) +_sym_db.RegisterMessage(UserData) + +SubscribeRequest = _reflection.GeneratedProtocolMessageType('SubscribeRequest', (_message.Message,), { + 'DESCRIPTOR' : _SUBSCRIBEREQUEST, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.SubscribeRequest) + }) +_sym_db.RegisterMessage(SubscribeRequest) + +Subscription = _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), { + 'DESCRIPTOR' : _SUBSCRIPTION, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.Subscription) + }) +_sym_db.RegisterMessage(Subscription) + +MarketData = _reflection.GeneratedProtocolMessageType('MarketData', (_message.Message,), { + 'DESCRIPTOR' : _MARKETDATA, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.MarketData) + }) +_sym_db.RegisterMessage(MarketData) + +Depth = _reflection.GeneratedProtocolMessageType('Depth', (_message.Message,), { + 'DESCRIPTOR' : _DEPTH, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.Depth) + }) +_sym_db.RegisterMessage(Depth) + +PriceVolume = _reflection.GeneratedProtocolMessageType('PriceVolume', (_message.Message,), { + 'DESCRIPTOR' : _PRICEVOLUME, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.PriceVolume) + }) +_sym_db.RegisterMessage(PriceVolume) + +Trade = _reflection.GeneratedProtocolMessageType('Trade', (_message.Message,), { + 'DESCRIPTOR' : _TRADE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.Trade) + }) +_sym_db.RegisterMessage(Trade) + +Ticker = _reflection.GeneratedProtocolMessageType('Ticker', (_message.Message,), { + 'DESCRIPTOR' : _TICKER, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.Ticker) + }) +_sym_db.RegisterMessage(Ticker) + +Order = _reflection.GeneratedProtocolMessageType('Order', (_message.Message,), { + 'DESCRIPTOR' : _ORDER, + '__module__' : 'bbgo_pb2' + # 
@@protoc_insertion_point(class_scope:bbgo.Order) + }) +_sym_db.RegisterMessage(Order) + +SubmitOrder = _reflection.GeneratedProtocolMessageType('SubmitOrder', (_message.Message,), { + 'DESCRIPTOR' : _SUBMITORDER, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.SubmitOrder) + }) +_sym_db.RegisterMessage(SubmitOrder) + +Balance = _reflection.GeneratedProtocolMessageType('Balance', (_message.Message,), { + 'DESCRIPTOR' : _BALANCE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.Balance) + }) +_sym_db.RegisterMessage(Balance) + +SubmitOrderRequest = _reflection.GeneratedProtocolMessageType('SubmitOrderRequest', (_message.Message,), { + 'DESCRIPTOR' : _SUBMITORDERREQUEST, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.SubmitOrderRequest) + }) +_sym_db.RegisterMessage(SubmitOrderRequest) + +SubmitOrderResponse = _reflection.GeneratedProtocolMessageType('SubmitOrderResponse', (_message.Message,), { + 'DESCRIPTOR' : _SUBMITORDERRESPONSE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.SubmitOrderResponse) + }) +_sym_db.RegisterMessage(SubmitOrderResponse) + +CancelOrderRequest = _reflection.GeneratedProtocolMessageType('CancelOrderRequest', (_message.Message,), { + 'DESCRIPTOR' : _CANCELORDERREQUEST, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.CancelOrderRequest) + }) +_sym_db.RegisterMessage(CancelOrderRequest) + +CancelOrderResponse = _reflection.GeneratedProtocolMessageType('CancelOrderResponse', (_message.Message,), { + 'DESCRIPTOR' : _CANCELORDERRESPONSE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.CancelOrderResponse) + }) +_sym_db.RegisterMessage(CancelOrderResponse) + +QueryOrderRequest = _reflection.GeneratedProtocolMessageType('QueryOrderRequest', (_message.Message,), { + 'DESCRIPTOR' : _QUERYORDERREQUEST, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.QueryOrderRequest) + }) +_sym_db.RegisterMessage(QueryOrderRequest) + +QueryOrderResponse = _reflection.GeneratedProtocolMessageType('QueryOrderResponse', (_message.Message,), { + 'DESCRIPTOR' : _QUERYORDERRESPONSE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.QueryOrderResponse) + }) +_sym_db.RegisterMessage(QueryOrderResponse) + +QueryOrdersRequest = _reflection.GeneratedProtocolMessageType('QueryOrdersRequest', (_message.Message,), { + 'DESCRIPTOR' : _QUERYORDERSREQUEST, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.QueryOrdersRequest) + }) +_sym_db.RegisterMessage(QueryOrdersRequest) + +QueryOrdersResponse = _reflection.GeneratedProtocolMessageType('QueryOrdersResponse', (_message.Message,), { + 'DESCRIPTOR' : _QUERYORDERSRESPONSE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.QueryOrdersResponse) + }) +_sym_db.RegisterMessage(QueryOrdersResponse) + +QueryTradesRequest = _reflection.GeneratedProtocolMessageType('QueryTradesRequest', (_message.Message,), { + 'DESCRIPTOR' : _QUERYTRADESREQUEST, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.QueryTradesRequest) + }) +_sym_db.RegisterMessage(QueryTradesRequest) + +QueryTradesResponse = _reflection.GeneratedProtocolMessageType('QueryTradesResponse', (_message.Message,), { + 'DESCRIPTOR' : _QUERYTRADESRESPONSE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.QueryTradesResponse) + }) +_sym_db.RegisterMessage(QueryTradesResponse) + +QueryKLinesRequest = 
_reflection.GeneratedProtocolMessageType('QueryKLinesRequest', (_message.Message,), { + 'DESCRIPTOR' : _QUERYKLINESREQUEST, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.QueryKLinesRequest) + }) +_sym_db.RegisterMessage(QueryKLinesRequest) + +QueryKLinesResponse = _reflection.GeneratedProtocolMessageType('QueryKLinesResponse', (_message.Message,), { + 'DESCRIPTOR' : _QUERYKLINESRESPONSE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.QueryKLinesResponse) + }) +_sym_db.RegisterMessage(QueryKLinesResponse) + +KLine = _reflection.GeneratedProtocolMessageType('KLine', (_message.Message,), { + 'DESCRIPTOR' : _KLINE, + '__module__' : 'bbgo_pb2' + # @@protoc_insertion_point(class_scope:bbgo.KLine) + }) +_sym_db.RegisterMessage(KLine) + +_MARKETDATASERVICE = DESCRIPTOR.services_by_name['MarketDataService'] +_USERDATASERVICE = DESCRIPTOR.services_by_name['UserDataService'] +_TRADINGSERVICE = DESCRIPTOR.services_by_name['TradingService'] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'Z\005../pb' + _EVENT._serialized_start=3305 + _EVENT._serialized_end=3415 + _CHANNEL._serialized_start=3417 + _CHANNEL._serialized_end=3494 + _SIDE._serialized_start=3496 + _SIDE._serialized_end=3521 + _ORDERTYPE._serialized_start=3523 + _ORDERTYPE._serialized_end=3620 + _EMPTY._serialized_start=20 + _EMPTY._serialized_end=27 + _ERROR._serialized_start=29 + _ERROR._serialized_end=79 + _USERDATAREQUEST._serialized_start=81 + _USERDATAREQUEST._serialized_end=115 + _USERDATA._serialized_start=118 + _USERDATA._serialized_end=314 + _SUBSCRIBEREQUEST._serialized_start=316 + _SUBSCRIBEREQUEST._serialized_end=377 + _SUBSCRIPTION._serialized_start=379 + _SUBSCRIPTION._serialized_end=492 + _MARKETDATA._serialized_start=495 + _MARKETDATA._serialized_end=784 + _DEPTH._serialized_start=786 + _DEPTH._serialized_end=893 + _PRICEVOLUME._serialized_start=895 + _PRICEVOLUME._serialized_end=939 + _TRADE._serialized_start=942 + _TRADE._serialized_end=1141 + _TICKER._serialized_start=1143 + _TICKER._serialized_end=1257 + _ORDER._serialized_start=1260 + _ORDER._serialized_end=1535 + _SUBMITORDER._serialized_start=1538 + _SUBMITORDER._serialized_end=1761 + _BALANCE._serialized_start=1763 + _BALANCE._serialized_end=1878 + _SUBMITORDERREQUEST._serialized_start=1880 + _SUBMITORDERREQUEST._serialized_end=1959 + _SUBMITORDERRESPONSE._serialized_start=1961 + _SUBMITORDERRESPONSE._serialized_end=2056 + _CANCELORDERREQUEST._serialized_start=2058 + _CANCELORDERREQUEST._serialized_end=2138 + _CANCELORDERRESPONSE._serialized_start=2140 + _CANCELORDERRESPONSE._serialized_end=2217 + _QUERYORDERREQUEST._serialized_start=2219 + _QUERYORDERREQUEST._serialized_end=2292 + _QUERYORDERRESPONSE._serialized_start=2294 + _QUERYORDERRESPONSE._serialized_end=2370 + _QUERYORDERSREQUEST._serialized_start=2373 + _QUERYORDERSREQUEST._serialized_end=2542 + _QUERYORDERSRESPONSE._serialized_start=2544 + _QUERYORDERSRESPONSE._serialized_end=2622 + _QUERYTRADESREQUEST._serialized_start=2625 + _QUERYTRADESREQUEST._serialized_end=2807 + _QUERYTRADESRESPONSE._serialized_start=2809 + _QUERYTRADESRESPONSE._serialized_end=2887 + _QUERYKLINESREQUEST._serialized_start=2889 + _QUERYKLINESREQUEST._serialized_end=3014 + _QUERYKLINESRESPONSE._serialized_start=3016 + _QUERYKLINESRESPONSE._serialized_end=3094 + _KLINE._serialized_start=3097 + _KLINE._serialized_end=3303 + _MARKETDATASERVICE._serialized_start=3623 + _MARKETDATASERVICE._serialized_end=3771 + 
_USERDATASERVICE._serialized_start=3773 + _USERDATASERVICE._serialized_end=3846 + _TRADINGSERVICE._serialized_start=3849 + _TRADINGSERVICE._serialized_end=4212 +# @@protoc_insertion_point(module_scope) diff --git a/python/bbgo_pb2_grpc.py b/python/bbgo_pb2_grpc.py new file mode 100644 index 0000000000..0b1e254485 --- /dev/null +++ b/python/bbgo_pb2_grpc.py @@ -0,0 +1,354 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import bbgo_pb2 as bbgo__pb2 + + +class MarketDataServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Subscribe = channel.unary_stream( + '/bbgo.MarketDataService/Subscribe', + request_serializer=bbgo__pb2.SubscribeRequest.SerializeToString, + response_deserializer=bbgo__pb2.MarketData.FromString, + ) + self.QueryKLines = channel.unary_unary( + '/bbgo.MarketDataService/QueryKLines', + request_serializer=bbgo__pb2.QueryKLinesRequest.SerializeToString, + response_deserializer=bbgo__pb2.QueryKLinesResponse.FromString, + ) + + +class MarketDataServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Subscribe(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def QueryKLines(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_MarketDataServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Subscribe': grpc.unary_stream_rpc_method_handler( + servicer.Subscribe, + request_deserializer=bbgo__pb2.SubscribeRequest.FromString, + response_serializer=bbgo__pb2.MarketData.SerializeToString, + ), + 'QueryKLines': grpc.unary_unary_rpc_method_handler( + servicer.QueryKLines, + request_deserializer=bbgo__pb2.QueryKLinesRequest.FromString, + response_serializer=bbgo__pb2.QueryKLinesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'bbgo.MarketDataService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class MarketDataService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Subscribe(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream(request, target, '/bbgo.MarketDataService/Subscribe', + bbgo__pb2.SubscribeRequest.SerializeToString, + bbgo__pb2.MarketData.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def QueryKLines(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/bbgo.MarketDataService/QueryKLines', + bbgo__pb2.QueryKLinesRequest.SerializeToString, + bbgo__pb2.QueryKLinesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + +class UserDataServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Subscribe = channel.unary_stream( + '/bbgo.UserDataService/Subscribe', + request_serializer=bbgo__pb2.UserDataRequest.SerializeToString, + response_deserializer=bbgo__pb2.UserData.FromString, + ) + + +class UserDataServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Subscribe(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_UserDataServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Subscribe': grpc.unary_stream_rpc_method_handler( + servicer.Subscribe, + request_deserializer=bbgo__pb2.UserDataRequest.FromString, + response_serializer=bbgo__pb2.UserData.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'bbgo.UserDataService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class UserDataService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Subscribe(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream(request, target, '/bbgo.UserDataService/Subscribe', + bbgo__pb2.UserDataRequest.SerializeToString, + bbgo__pb2.UserData.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + +class TradingServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.SubmitOrder = channel.unary_unary( + '/bbgo.TradingService/SubmitOrder', + request_serializer=bbgo__pb2.SubmitOrderRequest.SerializeToString, + response_deserializer=bbgo__pb2.SubmitOrderResponse.FromString, + ) + self.CancelOrder = channel.unary_unary( + '/bbgo.TradingService/CancelOrder', + request_serializer=bbgo__pb2.CancelOrderRequest.SerializeToString, + response_deserializer=bbgo__pb2.CancelOrderResponse.FromString, + ) + self.QueryOrder = channel.unary_unary( + '/bbgo.TradingService/QueryOrder', + request_serializer=bbgo__pb2.QueryOrderRequest.SerializeToString, + response_deserializer=bbgo__pb2.QueryOrderResponse.FromString, + ) + self.QueryOrders = channel.unary_unary( + '/bbgo.TradingService/QueryOrders', + request_serializer=bbgo__pb2.QueryOrdersRequest.SerializeToString, + response_deserializer=bbgo__pb2.QueryOrdersResponse.FromString, + ) + self.QueryTrades = channel.unary_unary( + '/bbgo.TradingService/QueryTrades', + request_serializer=bbgo__pb2.QueryTradesRequest.SerializeToString, + response_deserializer=bbgo__pb2.QueryTradesResponse.FromString, + ) + + +class TradingServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def SubmitOrder(self, request, context): + """request-response + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CancelOrder(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def QueryOrder(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def QueryOrders(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def QueryTrades(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_TradingServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'SubmitOrder': grpc.unary_unary_rpc_method_handler( + servicer.SubmitOrder, + request_deserializer=bbgo__pb2.SubmitOrderRequest.FromString, + response_serializer=bbgo__pb2.SubmitOrderResponse.SerializeToString, + ), + 'CancelOrder': grpc.unary_unary_rpc_method_handler( + servicer.CancelOrder, + request_deserializer=bbgo__pb2.CancelOrderRequest.FromString, + response_serializer=bbgo__pb2.CancelOrderResponse.SerializeToString, + ), + 'QueryOrder': grpc.unary_unary_rpc_method_handler( + servicer.QueryOrder, + request_deserializer=bbgo__pb2.QueryOrderRequest.FromString, + response_serializer=bbgo__pb2.QueryOrderResponse.SerializeToString, + ), + 'QueryOrders': grpc.unary_unary_rpc_method_handler( + servicer.QueryOrders, + request_deserializer=bbgo__pb2.QueryOrdersRequest.FromString, + response_serializer=bbgo__pb2.QueryOrdersResponse.SerializeToString, + ), + 'QueryTrades': grpc.unary_unary_rpc_method_handler( + servicer.QueryTrades, + 
request_deserializer=bbgo__pb2.QueryTradesRequest.FromString, + response_serializer=bbgo__pb2.QueryTradesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'bbgo.TradingService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class TradingService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def SubmitOrder(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/bbgo.TradingService/SubmitOrder', + bbgo__pb2.SubmitOrderRequest.SerializeToString, + bbgo__pb2.SubmitOrderResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def CancelOrder(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/bbgo.TradingService/CancelOrder', + bbgo__pb2.CancelOrderRequest.SerializeToString, + bbgo__pb2.CancelOrderResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def QueryOrder(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/bbgo.TradingService/QueryOrder', + bbgo__pb2.QueryOrderRequest.SerializeToString, + bbgo__pb2.QueryOrderResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def QueryOrders(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/bbgo.TradingService/QueryOrders', + bbgo__pb2.QueryOrdersRequest.SerializeToString, + bbgo__pb2.QueryOrdersResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def QueryTrades(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/bbgo.TradingService/QueryTrades', + bbgo__pb2.QueryTradesRequest.SerializeToString, + bbgo__pb2.QueryTradesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/python/examples/query_klines.py b/python/examples/query_klines.py new file mode 100644 index 0000000000..d03be201f3 --- /dev/null +++ b/python/examples/query_klines.py @@ -0,0 +1,24 @@ +import click + +from bbgo import MarketService + + +@click.command() +@click.option('--host', default='127.0.0.1') +@click.option('--port', default=50051) +def main(host, port): + service = MarketService(host, port) + + klines = service.query_klines( + exchange='binance', + symbol='BTCUSDT', + interval='1m', + limit=10, + ) + + for kline in 
klines: + print(kline) + + +if __name__ == '__main__': + main() diff --git a/python/examples/stream.py b/python/examples/stream.py new file mode 100644 index 0000000000..571ceb5d9b --- /dev/null +++ b/python/examples/stream.py @@ -0,0 +1,28 @@ +import click +from loguru import logger + +from bbgo import Stream +from bbgo.data import Event +from bbgo.handlers import UpdateHandler + + +class LogBook(UpdateHandler): + + def handle(self, event: Event) -> None: + logger.info(event) + + +@click.command() +@click.option('--host', default='127.0.0.1') +@click.option('--port', default=50051) +def main(host, port): + stream = Stream(host, port) + stream.subscribe('max', 'book', 'BTCUSDT', 'full') + stream.subscribe('max', 'book', 'ETHUSDT', 'full') + stream.subscribe_user_data('max') + stream.add_event_handler(LogBook()) + stream.start() + + +if __name__ == '__main__': + main() diff --git a/python/examples/subscribe.py b/python/examples/subscribe.py new file mode 100644 index 0000000000..abd0ff9639 --- /dev/null +++ b/python/examples/subscribe.py @@ -0,0 +1,25 @@ +import click +from loguru import logger + +from bbgo import MarketService +from bbgo.data import Subscription +from bbgo.enums import ChannelType +from bbgo.enums import DepthType + + +@click.command() +@click.option('--host', default='127.0.0.1') +@click.option('--port', default=50051) +def main(host, port): + subscriptions = [ + Subscription('binance', ChannelType.BOOK, symbol='BTCUSDT', depth=DepthType.FULL), + ] + + service = MarketService(host, port) + response_iter = service.subscribe(subscriptions) + for response in response_iter: + logger.info(response) + + +if __name__ == '__main__': + main() diff --git a/python/examples/subscribe_user_data.py b/python/examples/subscribe_user_data.py new file mode 100644 index 0000000000..9db07d6df7 --- /dev/null +++ b/python/examples/subscribe_user_data.py @@ -0,0 +1,24 @@ +import grpc +from loguru import logger + +import bbgo_pb2 +import bbgo_pb2_grpc +from bbgo.data import UserDataEvent + + +def main(): + host = '127.0.0.1' + port = 50051 + address = f'{host}:{port}' + channel = grpc.insecure_channel(address) + stub = bbgo_pb2_grpc.UserDataServiceStub(channel) + + request = bbgo_pb2.UserDataRequest(session='max') + response_iter = stub.Subscribe(request) + for response in response_iter: + event = UserDataEvent.from_pb(response) + logger.info(event) + + +if __name__ == '__main__': + main() diff --git a/python/poetry.lock b/python/poetry.lock new file mode 100644 index 0000000000..74c6544889 --- /dev/null +++ b/python/poetry.lock @@ -0,0 +1,423 @@ +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "click" +version = "8.0.4" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "grpcio" +version = "1.44.0" +description = "HTTP/2-based RPC framework" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +six = ">=1.5.2" + +[package.extras] +protobuf = ["grpcio-tools (>=1.44.0)"] + +[[package]] +name = "grpcio-tools" +version = "1.44.0" +description = "Protobuf code generator for gRPC" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +grpcio = ">=1.44.0" +protobuf = ">=3.5.0.post1,<4.0dev" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "loguru" +version = "0.6.0" +description = "Python logging made (stupidly) simple" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "tox (>=3.9.0)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "black (>=19.10b0)", "isort (>=5.1.1)", "Sphinx (>=4.1.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)"] + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false 
+python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "protobuf" +version = "3.19.4" +description = "Protocol Buffers" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pyparsing" +version = "3.0.7" +description = "Python parsing module" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pytest" +version = "7.0.1" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.8" +content-hash = "bfda359d4e023f07cd8df05859450215e9f560f50b4a77a8aa8436ac42a74fe3" + +[metadata.files] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +click = [ + {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, + {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = 
"sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +flake8 = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] +grpcio = [ + {file = "grpcio-1.44.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:11f811c0fffd84fca747fbc742464575e5eb130fd4fb4d6012ccc34febd001db"}, + {file = "grpcio-1.44.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:9a86a91201f8345502ea81dee0a55ae13add5fafadf109b17acd858fe8239651"}, + {file = "grpcio-1.44.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:5f3c54ebb5d9633a557335c01d88d3d4928e9b1b131692283b6184da1edbec0b"}, + {file = "grpcio-1.44.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d47553b8e86ab1e59b0185ba6491a187f94a0239f414c8fc867a22b0405b798"}, + {file = "grpcio-1.44.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1e22d3a510438b7f3365c0071b810672d09febac6e8ca8a47eab657ae5f347b"}, + {file = "grpcio-1.44.0-cp310-cp310-win32.whl", hash = "sha256:41036a574cab3468f24d41d6ed2b52588fb85ed60f8feaa925d7e424a250740b"}, + {file = "grpcio-1.44.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ee51964edfd0a1293a95bb0d72d134ecf889379d90d2612cbf663623ce832b4"}, + {file = "grpcio-1.44.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:e2149077d71e060678130644670389ddf1491200bcea16c5560d4ccdc65e3f2e"}, + {file = "grpcio-1.44.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:0ac72d4b953b76924f8fa21436af060d7e6d8581e279863f30ee14f20751ac27"}, + {file = "grpcio-1.44.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5c30a9a7d3a05920368a60b080cbbeaf06335303be23ac244034c71c03a0fd24"}, + {file = "grpcio-1.44.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:05467acd391e3fffb05991c76cb2ed2fa1309d0e3815ac379764bc5670b4b5d4"}, + {file = "grpcio-1.44.0-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:b81dc7894062ed2d25b74a2725aaa0a6895ce97ce854f432fe4e87cad5a07316"}, + {file = "grpcio-1.44.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46d4843192e7d36278884282e100b8f305cf37d1b3d8c6b4f736d4454640a069"}, + {file = "grpcio-1.44.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:898c159148f27e23c08a337fb80d31ece6b76bb24f359d83929460d813665b74"}, + {file = "grpcio-1.44.0-cp36-cp36m-win32.whl", hash = "sha256:b8d852329336c584c636caa9c2db990f3a332b19bc86a80f4646b58d27c142db"}, + {file = "grpcio-1.44.0-cp36-cp36m-win_amd64.whl", hash = "sha256:790d7493337558ae168477d1be3178f4c9b8f91d8cd9b8b719d06fd9b2d48836"}, + {file = "grpcio-1.44.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:cd61b52d9cf8fcf8d9628c0b640b9e44fdc5e93d989cc268086a858540ed370c"}, + {file = "grpcio-1.44.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:14eefcf623890f3f7dd7831decd2a2116652b5ce1e0f1d4b464b8f52110743b0"}, + {file = "grpcio-1.44.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:bebe90b8020b4248e5a2076b56154cc6ff45691bbbe980579fc9db26717ac968"}, + {file = "grpcio-1.44.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:89b390b1c0de909965280d175c53128ce2f0f4f5c0f011382243dd7f2f894060"}, + {file = "grpcio-1.44.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:c122dac5cb299b8ad7308d61bd9fe0413de13b0347cce465398436b3fdf1f609"}, + {file = "grpcio-1.44.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6641a28cc826a92ef717201cca9a035c34a0185e38b0c93f3ce5f01a01a1570a"}, + {file = "grpcio-1.44.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb0a3e0e64843441793923d9532a3a23907b07b2a1e0a7a31f186dc185bb772"}, + {file = "grpcio-1.44.0-cp37-cp37m-win32.whl", hash = "sha256:be857b7ec2ac43455156e6ba89262f7d7ae60227049427d01a3fecd218a3f88d"}, + {file = "grpcio-1.44.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f6a9cf0e77f72f2ac30c9c6e086bc7446c984c51bebc6c7f50fbcd718037edba"}, + {file = "grpcio-1.44.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:19e54f0c7083c8332b5a75a9081fc5127f1dbb67b6c1a32bd7fe896ef0934918"}, + {file = "grpcio-1.44.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:bfd36b959c3c4e945119387baed1414ea46f7116886aa23de0172302b49d7ff1"}, + {file = "grpcio-1.44.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:ccd388b8f37b19d06e4152189726ce309e36dc03b53f2216a4ea49f09a7438e6"}, + {file = "grpcio-1.44.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:9075c0c003c1ff14ebce8f0ba55cc692158cb55c68da09cf8b0f9fc5b749e343"}, + {file = "grpcio-1.44.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:e898194f76212facbaeb6d7545debff29351afa23b53ff8f0834d66611af5139"}, + {file = "grpcio-1.44.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fa6584046a7cf281649975a363673fa5d9c6faf9dc923f261cc0e56713b5892"}, + {file = "grpcio-1.44.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36a7bdd6ef9bca050c7ade8cba5f0e743343ea0756d5d3d520e915098a9dc503"}, + {file = "grpcio-1.44.0-cp38-cp38-win32.whl", hash = "sha256:dc3290d0411ddd2bd49adba5793223de8de8b01588d45e9376f1a9f7d25414f4"}, + {file = "grpcio-1.44.0-cp38-cp38-win_amd64.whl", hash = "sha256:13343e7b840c20f43b44f0e6d3bbdc037c964f0aec9735d7cb685c407731c9ff"}, + {file = "grpcio-1.44.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c5c2f8417d13386e18ccc8c61467cb6a6f9667a1ff7000a2d7d378e5d7df693f"}, + {file = "grpcio-1.44.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:cf220199b7b4992729ad4d55d5d3f652f4ccfe1a35b5eacdbecf189c245e1859"}, + {file = "grpcio-1.44.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4201c597e5057a9bfef9ea5777a6d83f6252cb78044db7d57d941ec2300734a5"}, + {file = "grpcio-1.44.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:e2de61005118ae59d48d5d749283ebfd1ba4ca68cc1000f8a395cd2bdcff7ceb"}, + {file = "grpcio-1.44.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:871078218fa9117e2a378678f327e32fda04e363ed6bc0477275444273255d4d"}, + {file = "grpcio-1.44.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8d610b7b557a7609fecee80b6dd793ecb7a9a3c3497fbdce63ce7d151cdd705"}, + {file = "grpcio-1.44.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcb53e4eb8c271032c91b8981df5fc1bb974bc73e306ec2c27da41bd95c44b5"}, + {file = "grpcio-1.44.0-cp39-cp39-win32.whl", hash = "sha256:e50ddea6de76c09b656df4b5a55ae222e2a56e625c44250e501ff3c904113ec1"}, + {file = "grpcio-1.44.0-cp39-cp39-win_amd64.whl", hash = "sha256:d2ec124a986093e26420a5fb10fa3f02b2c232f924cdd7b844ddf7e846c020cd"}, + {file = "grpcio-1.44.0.tar.gz", hash = "sha256:4bae1c99896045d3062ab95478411c8d5a52cb84b91a1517312629fa6cfeb50e"}, +] +grpcio-tools = [ + {file = "grpcio-tools-1.44.0.tar.gz", hash = "sha256:be37f458ea510c9a8f1caabbc2b258d12e55d189a567f5edcace90f27dc0efbf"}, + {file = "grpcio_tools-1.44.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:9f58529e24f613019a85c258a274d441d89e0cad8cf7fca21ef3807ba5840c5d"}, + {file = 
"grpcio_tools-1.44.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:1d120082236f8d2877f8a19366476b82c3562423b877b7c471a142432e31c2c4"}, + {file = "grpcio_tools-1.44.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:65c2fe3cdc5425180f01dd303e28d4f363d38f4c2e3a7e1a87caedd5417e23bb"}, + {file = "grpcio_tools-1.44.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5caef118deb8cdee1978fd3d8e388a9b256cd8d34e4a8895731ac0e86fa5e47c"}, + {file = "grpcio_tools-1.44.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:121c9765cee8636201cf0d4e80bc7b509813194919bccdb66e9671c4ece6dac3"}, + {file = "grpcio_tools-1.44.0-cp310-cp310-win32.whl", hash = "sha256:90d1fac188bac838c4169eb3b67197887fa0572ea8a90519a20cddb080800549"}, + {file = "grpcio_tools-1.44.0-cp310-cp310-win_amd64.whl", hash = "sha256:3e16260dfe6e997330473863e01466b0992369ae2337a0249b390b4651cff424"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:608414cc1093e1e9e5980c97a6ee78e51dffff359e7a3f123d1fb9d95b8763a5"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:395609c06f69fbc79518b30a01931127088a3f9ef2cc2a35269c5f187eefd38c"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f7ce16766b24b88ec0e4355f5dd66c2eee6af210e889fcb7961c9c4634c687de"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:3c9abc4a40c62f46d5e43e49c7afc567dedf12eeef95933ac9ea2986baa2420b"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:b73fd87a44ba1b91866b0254193c37cdb001737759b77b637cebe0c816d38342"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b211f12e4cbc0fde8e0f982b0f581cce38874666a02ebfed93c23dcaeb8a4e0"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b421dc9b27bcaff4c73644cd3801e4893b11ba3eb39729246fd3de98d9f685b"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-win32.whl", hash = "sha256:33d93027840a873c7b59402fe6db8263b88c56e2f84aa0b6281c05cc8bd314a1"}, + {file = "grpcio_tools-1.44.0-cp36-cp36m-win_amd64.whl", hash = "sha256:71fb6e7e66b918803b1bebd0231560981ab86c2546a3318a45822ce94de5e83d"}, + {file = "grpcio_tools-1.44.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:614c427ff235d92f103e9189f0230197c8f2f817d0dd9fd078f5d2ea4d920d02"}, + {file = "grpcio_tools-1.44.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:c13e0cb486cfa15320ddcd70452a4d736e6ce319c03d6b3c0c2513ec8d2748fb"}, + {file = "grpcio_tools-1.44.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:5ade6b13dc4e148f400c8f55a6ef0b14216a3371d7a9e559571d5981b6cec36b"}, + {file = "grpcio_tools-1.44.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6138d2c7eec7ed57585bc58e2dbcb65635a2d574ac632abd29949d3e68936bab"}, + {file = "grpcio_tools-1.44.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:3d6c8548b199591757dbfe89ed14e23782d6079d6d201c6c314c72f4086883aa"}, + {file = "grpcio_tools-1.44.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b41c419829f01734d65958ba9b01b759061d8f7e0698f9612ba6b8837269f7a9"}, + {file = "grpcio_tools-1.44.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9f0c5b4567631fec993826e694e83d86a972b3e2e9b05cb0c56839b0316d26c"}, + {file = "grpcio_tools-1.44.0-cp37-cp37m-win32.whl", hash = "sha256:3f0e1d1f3f5a6f0c9f8b5441819dbec831ce7e9ffe04768e4b0d965a95fbbe5e"}, + {file = 
"grpcio_tools-1.44.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f87fc86d0b4181b6b4da6ec6a29511dca000e6b5694fdd6bbf87d125128bc41"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:cb8baa1d4cea35ca662c24098377bdd9514c56f227da0e38b43cd9b8223bfcc6"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:ea36a294f7c70fd2f2bfb5dcf08602006304aa65b055ebd4f7c709e2a89deba7"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1972caf8f695b91edc6444134445798692fe71276f0cde7604d55e65179adf93"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:674fb8d9c0e2d75166c4385753962485b757897223fc92a19c9e513ab80b96f7"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:37045ba850d423cdacede77b266b127025818a5a36d80f1fd7a5a1614a6a0de5"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdf72947c6b0b03aa6dac06117a095947d02d43a5c6343051f4ce161fd0abcb"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69bfa6fc1515c202fe428ba9f99e2b2f947b01bafc15d868798235b2e2d36baa"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-win32.whl", hash = "sha256:2c516124356476d9afa126acce10ce568733120afbd9ae17ee01d44b9da20a67"}, + {file = "grpcio_tools-1.44.0-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6441c24176705c5ab056e65a8b330e107107c5a492ba094d1b862a136d15d"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:398eda759194d355eb09f7beabae6e4fb45b3877cf7efe505b49095fa4889cef"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:a169bfd7a1fe8cc11472eeeeab3088b3c5d56caac12b2192a920b73adcbc974c"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:a58aaaec0d846d142edd8e794ebb80aa429abfd581f4493a60a603aac0c50ac8"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c3253bee8b68fe422754faf0f286aa068861c926a7b11e4daeb44b9af767c7f1"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:3c0be60721ae1ba09c4f29572a145f412e561b9201e19428758893709827f472"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e44b9572c2226b85976e0d6054e22d7c59ebd6c9425ee71e5bc8910434aee3e1"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c04ec47905c4f6d6dad34d29f6ace652cc1ddc986f55aaa5559b72104c3f5cf"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-win32.whl", hash = "sha256:fb8c7b9d24e2c4dc77e7800e83b68081729ac6094b781b2afdabf08af18c3b28"}, + {file = "grpcio_tools-1.44.0-cp39-cp39-win_amd64.whl", hash = "sha256:4eb93619c8cb3773fb899504e3e30a0dc79d3904fd7a84091d15552178e1e920"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +loguru = [ + {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"}, + {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = 
"mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +protobuf = [ + {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, + {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, + {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, + {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, + {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, + {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, + {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, + {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, + {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, + {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, + {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, + {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, + {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, + {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, + {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, + {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, + {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, + {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pycodestyle = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] +pyflakes = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] +pyparsing = [ + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, +] +pytest = [ + {file = "pytest-7.0.1-py3-none-any.whl", hash = "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db"}, + {file = "pytest-7.0.1.tar.gz", hash = "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +win32-setctime = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] diff --git a/python/pyproject.toml b/python/pyproject.toml new file mode 100644 index 0000000000..4ffbcbea3e --- /dev/null +++ b/python/pyproject.toml @@ -0,0 +1,25 @@ +[tool.poetry] +name = "bbgo" +version = "0.1.9" +description = "" +authors = ["なるみ "] +packages = [ + { include = "bbgo" }, + { include = "bbgo_pb2.py" }, + { include = "bbgo_pb2_grpc.py" }, +] + +[tool.poetry.dependencies] +python = "^3.8" +click = "^8.0.4" +loguru = "^0.6.0" +grpcio = "^1.44.0" +grpcio-tools = "^1.44.0" +flake8 = "^4.0.1" + 
+[tool.poetry.dev-dependencies] +pytest = "^7.0.1" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/python/setup.cfg b/python/setup.cfg new file mode 100644 index 0000000000..aa1f28723d --- /dev/null +++ b/python/setup.cfg @@ -0,0 +1,14 @@ +[flake8] +max-line-length = 120 +per-file-ignores = __init__.py: F401 +# ignore = +exclude = bbgo_pb2.py, bbgo_pb2_grpc.py + +[yapf] +based_on_style = google +column_limit = 120 + +[isort] +not_skip = __init__.py +line_length = 120 +force_single_line = True diff --git a/python/tests/__init__.py b/python/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/tests/test_data.py b/python/tests/test_data.py new file mode 100644 index 0000000000..e5aee9e43e --- /dev/null +++ b/python/tests/test_data.py @@ -0,0 +1,82 @@ +from decimal import Decimal + +import bbgo_pb2 +from bbgo.data import Balance +from bbgo.data import ErrorMessage +from bbgo.data import KLine +from bbgo.utils import parse_time + + +def test_balance_from_pb(): + exchange = 'max' + currency = 'BTCUSDT' + available = '3.1415926' + locked = '2.7182818' + borrowed = '0.1234567' + + balance_pb = bbgo_pb2.Balance( + exchange=exchange, + currency=currency, + available=available, + locked=locked, + borrowed=borrowed, + ) + + balance = Balance.from_pb(balance_pb) + + assert balance.exchange == exchange + assert balance.currency == currency + assert balance.available == Decimal(available) + assert balance.locked == Decimal(locked) + assert balance.borrowed == Decimal(borrowed) + + +def test_kline_from_pb(): + exchange = "binance" + symbol = "BTCUSDT" + open = "39919.31" + high = "39919.32" + low = "39919.31" + close = "39919.31" + volume = "0.27697" + quote_volume = "11056.4530226" + start_time = 1649833260000 + end_time = 1649833319999 + closed = True + + kline_pb = bbgo_pb2.KLine(exchange=exchange, + symbol=symbol, + open=open, + high=high, + low=low, + close=close, + volume=volume, + quote_volume=quote_volume, + start_time=start_time, + end_time=end_time, + closed=closed) + + kline = KLine.from_pb(kline_pb) + + assert kline.exchange == exchange + assert kline.symbol == symbol + assert kline.open == Decimal(open) + assert kline.high == Decimal(high) + assert kline.low == Decimal(low) + assert kline.close == Decimal(close) + assert kline.volume == Decimal(volume) + assert kline.quote_volume == Decimal(quote_volume) + assert kline.start_time == parse_time(start_time) + assert kline.end_time == parse_time(end_time) + assert kline.closed == closed + + +def test_order_from_pb(): + error_code = 123 + error_message = "error message 123" + + error_pb = bbgo_pb2.Error(error_code=error_code, error_message=error_message) + error = ErrorMessage.from_pb(error_pb) + + assert error.code == error_code + assert error.message == error_message diff --git a/python/tests/test_enums.py b/python/tests/test_enums.py new file mode 100644 index 0000000000..6fef2e9af8 --- /dev/null +++ b/python/tests/test_enums.py @@ -0,0 +1,15 @@ +from bbgo.enums import ChannelType + + +def test_channel_type_from_str(): + m = { + 'book': ChannelType.BOOK, + 'trade': ChannelType.TRADE, + 'ticker': ChannelType.TICKER, + 'kline': ChannelType.KLINE, + 'balance': ChannelType.BALANCE, + 'order': ChannelType.ORDER, + } + + for k, v in m.items(): + assert ChannelType.from_str(k) == v diff --git a/python/tests/test_utils.py b/python/tests/test_utils.py new file mode 100644 index 0000000000..bf673c8d7a --- /dev/null +++ b/python/tests/test_utils.py @@ -0,0 +1,19 @@
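+# The modules under python/tests are plain pytest modules; with pytest declared as a dev dependency
+# in pyproject.toml they can be run from the python/ directory with, for example,
+# `poetry install && poetry run pytest` (an illustrative invocation, not part of this change).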
+from decimal import Decimal + +from bbgo.utils import parse_number +from bbgo.utils import parse_time + + +def test_parse_time(): + t = 1650610080000 + d = parse_time(t) + + assert d.timestamp() == t / 1000 + + +def test_parse_float(): + assert parse_number(None) == 0 + assert parse_number("") == 0 + + s = "3.14159265358979" + assert parse_number(s) == Decimal(s) diff --git a/rockhopper_mysql.yaml b/rockhopper_mysql.yaml new file mode 100644 index 0000000000..2519de0ae3 --- /dev/null +++ b/rockhopper_mysql.yaml @@ -0,0 +1,15 @@ +# vim:filetype=yaml: +# you can copy this file to rockhopper_mysql_local.yaml to have your modification +--- +driver: mysql +dialect: mysql + +# unix socket connection to mysql with password +# dsn: "root:123123@unix(/opt/local/var/run/mysql57/mysqld.sock)/bbgo_dev?parseTime=true" + +# tcp connection to mysql with password +dsn: "root:root@tcp(localhost:3306)/bbgo?parseTime=true" + +# tcp connection to mysql without password +# dsn: "root@tcp(localhost:3306)/bbgo_dev?parseTime=true" +migrationsDir: migrations/mysql diff --git a/rockhopper_sqlite.yaml b/rockhopper_sqlite.yaml new file mode 100644 index 0000000000..259e7fe679 --- /dev/null +++ b/rockhopper_sqlite.yaml @@ -0,0 +1,5 @@ +--- +driver: sqlite3 +dialect: sqlite3 +dsn: "bbgo.sqlite3" +migrationsDir: migrations/sqlite3 diff --git a/scripts/download-dnum.sh b/scripts/download-dnum.sh new file mode 100644 index 0000000000..a8aeef0119 --- /dev/null +++ b/scripts/download-dnum.sh @@ -0,0 +1,40 @@ +#!/bin/bash +set -e +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') +osf=$(uname | tr '[:upper:]' '[:lower:]') +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac +dist_file=bbgo-dnum-$version-$osf-$arch.tar.gz + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} + +function info() +{ + echo -e "${GREEN}$@${NC}" +} + +info "downloading..." +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-dnum-$osf-$arch bbgo +chmod +x bbgo +info "downloaded successfully" diff --git a/scripts/download.sh b/scripts/download.sh index c90b83f704..9d5268e2dd 100755 --- a/scripts/download.sh +++ b/scripts/download.sh @@ -1,13 +1,40 @@ #!/bin/bash +set -e +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac osf=$(uname | tr '[:upper:]' '[:lower:]') -version=v1.6.0 +dist_file=bbgo-$version-$osf-$arch.tar.gz -if [[ -n $1 ]] ; then - version=$1 -fi +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color -echo "downloading bbgo $version" -curl -L -o bbgo https://github.com/c9s/bbgo/releases/download/$version/bbgo-$osf -chmod +x bbgo +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} -echo "bbgo is downloaded at ./bbgo" +function info() +{ + echo -e "${GREEN}$@${NC}" +} + +info "downloading..." 
+curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-$osf-$arch bbgo +chmod +x bbgo +info "downloaded successfully" diff --git a/scripts/max.sh b/scripts/max.sh index 565f358ffa..b66290a016 100644 --- a/scripts/max.sh +++ b/scripts/max.sh @@ -24,6 +24,30 @@ case "$command" in submitOrder order_params ;; + deposits) + declare -A params=() + currency=$1 + if [[ -n $currency ]] ; then + params[currency]=$currency + fi + + deposits params \ + | jq -r '.[] | [ .uuid, .txid, ((.amount | tonumber) * 10000 | floor / 10000), .currency, .state, (.created_at | strflocaltime("%Y-%m-%dT%H:%M:%S %Z")), .note ] | @tsv' \ + | column -ts $'\t' + ;; + + withdrawals) + declare -A params=() + currency=$1 + if [[ -n $currency ]] ; then + params[currency]=$currency + fi + + withdrawals params \ + | jq -r '.[] | [ .uuid, .txid, ((.amount | tonumber) * 10000 | floor / 10000), .currency, ((.fee | tonumber) * 10000 | floor / 10000), .fee_currency, .state, (.created_at | strflocaltime("%Y-%m-%dT%H:%M:%S %Z")), .note ] | @tsv' \ + | column -ts $'\t' + ;; + limit) market=$1 side=$2 @@ -62,6 +86,20 @@ case "$command" in jq -r '.[] | "\(.id) \(.market) \(.side) \(.ord_type) \(if .ord_type | test("stop") then "stop@" + .stop_price else "" end) price = \(if .ord_type | test("market") then "any" else .price end) \t volume = \(.volume) \(.state)"' ;; + order) + if [[ $# < 1 ]] ; then + echo "$0 order [id]" + exit + fi + + id=$1 + declare -A orders_params=() + orders_params[id]=$id + myOrder orders_params | \ + jq -r '.' + ;; + + cancel) if [[ $# < 1 ]] ; then echo "$0 cancel [oid]" @@ -78,11 +116,12 @@ case "$command" in if [[ -n $currency ]] ; then rewards_params[currency]=$currency fi + # rewards rewards_params | jq -r '.[] | "\(.type)\t\((.amount | tonumber) * 1000 | floor / 1000)\t\(.currency) \(.state) \(.created_at | strflocaltime("%Y-%m-%dT%H:%M:%S %Z"))"' - rewards rewards_params | jq -r '.[] | [ .type, ((.amount | tonumber) * 10000 | floor / 10000), .currency, .state, (.created_at | strflocaltime("%Y-%m-%dT%H:%M:%S %Z")) ] | @tsv' \ + rewards rewards_params | jq -r '.[] | [ .uuid, .type, ((.amount | tonumber) * 10000 | floor / 10000), .currency, .state, (.created_at | strflocaltime("%Y-%m-%dT%H:%M:%S %Z")), .note ] | @tsv' \ | column -ts $'\t' ;; - + trades) if [[ $# < 1 ]] ; then echo "$0 trades [market]" diff --git a/scripts/maxapi.sh b/scripts/maxapi.sh index 24367c7614..a5756e5fc9 100755 --- a/scripts/maxapi.sh +++ b/scripts/maxapi.sh @@ -116,21 +116,38 @@ function cancelOrder() function myOrders() { - local -n params=$1 - send_auth_request "GET" "/api/v2/orders" params + local -n _params=$1 + send_auth_request "GET" "/api/v2/orders" _params } +function myOrder() +{ + local -n _params=$1 + send_auth_request "GET" "/api/v2/order" _params +} function myTrades() { - local -n params=$1 - send_auth_request "GET" "/api/v2/trades/my" params + local -n _params=$1 + send_auth_request "GET" "/api/v2/trades/my" _params } function rewards() { - local -n params=$1 - send_auth_request "GET" "/api/v2/rewards" params + local -n _params=$1 + send_auth_request "GET" "/api/v2/rewards" _params +} + +function deposits() +{ + local -n _params=$1 + send_auth_request "GET" "/api/v2/deposits" _params +} + +function withdrawals() +{ + local -n _params=$1 + send_auth_request "GET" "/api/v2/withdrawals" _params } diff --git a/scripts/release-test.sh b/scripts/release-test.sh new file mode 100644 index 0000000000..c33afe69a3 --- /dev/null +++ b/scripts/release-test.sh @@ -0,0 +1,12 @@ 
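# Illustrative invocations of the max.sh subcommands added above (the currency and order id values
# below are hypothetical placeholders; valid MAX API credentials are required):
#   ./max.sh deposits usdt      # list recent deposits, optionally filtered by currency
#   ./max.sh withdrawals usdt   # list recent withdrawals with fee and state columns
#   ./max.sh order 12345678     # dump a single order by id as JSON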
+#!/bin/bash +set -e +echo "testing sync..." +dotenv -f .env.local.mysql -- go run ./cmd/bbgo sync --session binance --config config/sync.yaml +dotenv -f .env.local.sqlite -- go run ./cmd/bbgo sync --session binance --config config/sync.yaml + +echo "backtest sync..." +echo "backtest mysql sync..." +dotenv -f .env.local.mysql -- go run ./cmd/bbgo backtest --config config/dca.yaml --sync --sync-only --verify + +echo "backtest sqlite sync..." +dotenv -f .env.local.sqlite -- go run ./cmd/bbgo backtest --config config/dca.yaml --sync --sync-only --verify diff --git a/scripts/setup-bollgrid-dnum.sh b/scripts/setup-bollgrid-dnum.sh new file mode 100644 index 0000000000..b5e6f273ba --- /dev/null +++ b/scripts/setup-bollgrid-dnum.sh @@ -0,0 +1,109 @@ +#!/bin/bash +set -e + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} + +function info() +{ + echo -e "${GREEN}$@${NC}" +} +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') +osf=$(uname | tr '[:upper:]' '[:lower:]') +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac +dist_file=bbgo-dnum-$version-$osf-$arch.tar.gz + +info "downloading..." +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-dnum-$osf-$arch bbgo +chmod +x bbgo +info "downloaded successfully" + +function gen_dotenv() +{ + read -p "Enter your MAX API key: " api_key + read -p "Enter your MAX API secret: " api_secret + echo "Generating your .env.local file..." +cat <<END > .env.local +MAX_API_KEY=$api_key +MAX_API_SECRET=$api_secret +END + +} + +if [[ -e ".env.local" ]] ; then + echo "Found existing .env.local, you will overwrite the existing .env.local file!" + read -p "Are you sure? (Y/n) " a + if [[ $a != "n" ]] ; then + gen_dotenv + fi +else + gen_dotenv +fi + +if [[ -e "bbgo.yaml" ]] ; then + echo "Found existing bbgo.yaml, you will overwrite the existing bbgo.yaml file!" + read -p "Are you sure?
(Y/n) " a + if [[ $a == "n" ]] ; then + exit + fi +fi + +cat <<END > bbgo.yaml +--- +exchangeStrategies: +- on: max + bollgrid: + symbol: BTCUSDT + interval: 1h + gridNumber: 20 + quantity: 0.001 + profitSpread: 100.0 + +END + +info "config file is generated successfully" +echo "================================================================" +echo "now you can edit your strategy config file bbgo.yaml to run bbgo" + +if [[ $osf == "darwin" ]] ; then + echo "we found you're using MacOS, you can type:" + echo "" + echo " open -a TextEdit bbgo.yaml" + echo "" +else + echo "you look like a pro user, you can edit the config by:" + echo "" + echo " vim bbgo.yaml" + echo "" +fi + +echo "To run bbgo just type: " +echo "" +echo " ./bbgo run" +echo "" +echo "To stop bbgo, just hit CTRL-C" + +if [[ $osf == "darwin" ]] ; then + open -a TextEdit bbgo.yaml +fi diff --git a/scripts/setup-bollgrid.sh b/scripts/setup-bollgrid.sh index c57cbe40de..fce4efef49 100755 --- a/scripts/setup-bollgrid.sh +++ b/scripts/setup-bollgrid.sh @@ -1,23 +1,62 @@ #!/bin/bash +set -e + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} + +function info() +{ + echo -e "${GREEN}$@${NC}" +} +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') osf=$(uname | tr '[:upper:]' '[:lower:]') -version=v1.6.0 +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac +dist_file=bbgo-$version-$osf-$arch.tar.gz +exchange=max + +if [[ -n $1 ]] ; then + exchange=$1 +fi -echo "Downloading bbgo" -curl -L -o bbgo https://github.com/c9s/bbgo/releases/download/$version/bbgo-$osf +exchange_upper=$(echo -n $exchange | tr 'a-z' 'A-Z') + +info "downloading..." +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-$osf-$arch bbgo chmod +x bbgo -echo "Binary downloaded" -echo "Config file is generated" +info "downloaded successfully" function gen_dotenv() { - read -p "Enter your MAX API key: " api_key - read -p "Enter your MAX API secret: " api_secret - echo "Generating your .env.local file..." + read -p "Enter your $exchange_upper API key: " api_key + read -p "Enter your $exchange_upper API secret: " api_secret + info "Generating your .env.local file..."
cat <<END > .env.local -export MAX_API_KEY=$api_key -export MAX_API_SECRET=$api_secret +${exchange_upper}_API_KEY=$api_key +${exchange_upper}_API_SECRET=$api_secret END + info "dotenv is configured successfully" } if [[ -e ".env.local" ]] ; then @@ -40,43 +79,36 @@ fi cat <<END > bbgo.yaml --- -riskControls: - sessionBased: - max: - orderExecutor: - bySymbol: - BTCUSDT: - # basic risk control order executor - basic: - minQuoteBalance: 100.0 - maxBaseAssetBalance: 3.0 - minBaseAssetBalance: 0.0 - maxOrderAmount: 1000.0 - exchangeStrategies: -- on: max +- on: ${exchange} bollgrid: symbol: BTCUSDT - interval: 5m + interval: 1h gridNumber: 20 quantity: 0.001 - profitSpread: 50.0 + profitSpread: 100.0 + END -echo "Config file is generated" +info "config file is generated successfully" echo "================================================================" -echo "Now you can edit your strategy config file bbgo.yaml to run bbgo" +echo "now you can edit your strategy config file bbgo.yaml to run bbgo" if [[ $osf == "darwin" ]] ; then - echo "We found you're using MacOS, you can type:" + echo "we found you're using MacOS, you can type:" echo "" echo " open -a TextEdit bbgo.yaml" echo "" +else + echo "you look like a pro user, you can edit the config by:" + echo "" + echo " vim bbgo.yaml" + echo "" fi echo "To run bbgo just type: " echo "" -echo " source .env.local && ./bbgo run --config bbgo.yaml" +echo " ./bbgo run" echo "" echo "To stop bbgo, just hit CTRL-C" diff --git a/scripts/setup-bollmaker.sh b/scripts/setup-bollmaker.sh new file mode 100755 index 0000000000..11a1d90eaa --- /dev/null +++ b/scripts/setup-bollmaker.sh @@ -0,0 +1,185 @@ +#!/bin/bash +set -e + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} + +function info() +{ + echo -e "${GREEN}$@${NC}" +} +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') +osf=$(uname | tr '[:upper:]' '[:lower:]') +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac +dist_file=bbgo-$version-$osf-$arch.tar.gz + +info "downloading..." +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-$osf-$arch bbgo +chmod +x bbgo +info "downloaded successfully" + +function gen_dotenv() +{ + read -p "Enter your Binance API key: " api_key + read -p "Enter your Binance API secret: " api_secret + echo "Generating your .env.local file..." +cat <<END > .env.local +BINANCE_API_KEY=$api_key +BINANCE_API_SECRET=$api_secret +END + +} + +if [[ -e ".env.local" ]] ; then + echo "Found existing .env.local, you will overwrite the existing .env.local file!" + read -p "Are you sure? (Y/n) " a + if [[ $a != "n" ]] ; then + gen_dotenv + fi +else + gen_dotenv +fi + +if [[ -e "bbgo.yaml" ]] ; then + echo "Found existing bbgo.yaml, you will overwrite the existing bbgo.yaml file!" + read -p "Are you sure? (Y/n) " a + if [[ $a == "n" ]] ; then + exit + fi +fi + +cat <<END > bbgo.yaml +--- +sessions: + binance: + exchange: binance + envVarPrefix: BINANCE + +persistence: + redis: + host: 127.0.0.1 + port: 6379 + db: 0 + +exchangeStrategies: +- on: binance + bollmaker: + symbol: ETHUSDT + + # interval is how often you want to update your order price and quantity + interval: 1m + + # quantity is the base order quantity for your buy/sell order.
+ quantity: 0.05 + + # useTickerPrice uses the ticker api to get the mid price instead of the closed kline price. + # The back-test engine is kline-based, so the ticker price api is not supported. + # Turn this on if you want to do real trading. + useTickerPrice: false + + # spread is the price spread from the middle price. + # For ask orders, the ask price is ((bestAsk + bestBid) / 2 * (1.0 + spread)) + # For bid orders, the bid price is ((bestAsk + bestBid) / 2 * (1.0 - spread)) + # Spread can be set by percentage or floating number. e.g., 0.1% or 0.001 (see the worked example below) + spread: 0.09% + + # minProfitSpread is the minimal order price spread from the current average cost. + # For long position, you will only place sell order above the price (= average cost * (1 + minProfitSpread)) + # For short position, you will only place buy order below the price (= average cost * (1 - minProfitSpread)) + minProfitSpread: 0.5% + + # dynamicExposurePositionScale overrides maxExposurePosition + # for domain, + # -1 means -100%, the price is on the lower band price. + # if the price breaks the lower band, a number less than -1 will be given. + # 1 means 100%, the price is on the upper band price. + # if the price breaks the upper band, a number greater than 1 will be given, for example, 1.2 for 120%, and 1.3 for 130%. + dynamicExposurePositionScale: + byPercentage: + # exp means we want to use exponential scale, you can replace "exp" with "linear" for linear scale + exp: + # from lower band -100% (-1) to upper band 100% (+1) + domain: [ -1, 1 ] + # when in down band, holds 1.0 by maximum + # when in up band, holds 0.05 by maximum + range: [ 10.0, 1.0 ] + + # disableShort means you don't want to hold a short position during market making. + # The short here means you might sell some of your existing inventory. + disableShort: true + + # uptrendSkew, like the strongUptrendSkew, but the price is still in the default band. + uptrendSkew: 0.8 + + # downtrendSkew, like the strongDowntrendSkew, but the price is still in the default band. + downtrendSkew: 1.2 + + defaultBollinger: + interval: "1h" + window: 21 + bandWidth: 2.0 + + # neutralBollinger is the smaller range of the bollinger band + # If price is in this band, it usually means the price is oscillating. + neutralBollinger: + interval: "5m" + window: 21 + bandWidth: 2.0 + + # tradeInBand: when tradeInBand is set, you will only place orders in the bollinger band. + tradeInBand: false + + # buyBelowNeutralSMA: when this is set, it will only place a buy order when the current price is below the SMA line.
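+    # Worked example for the spread settings above (illustrative numbers, not part of the
+    # original config): with bestBid = 3000 and bestAsk = 3002 the mid price is 3001, so with
+    # spread 0.09% the ask is quoted near 3001 * 1.0009 ≈ 3003.70 and the bid near
+    # 3001 * 0.9991 ≈ 2998.30; with minProfitSpread 0.5% and an average cost of 3000, sell
+    # orders for a long position are only placed above 3000 * 1.005 = 3015.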
+ buyBelowNeutralSMA: false + + persistence: + type: redis + +END + +info "config file is generated successfully" +echo "================================================================" +echo "now you can edit your strategy config file bbgo.yaml to run bbgo" + +if [[ $osf == "darwin" ]] ; then + echo "we found you're using MacOS, you can type:" + echo "" + echo " open -a TextEdit bbgo.yaml" + echo "" +else + echo "you look like a pro user, you can edit the config by:" + echo "" + echo " vim bbgo.yaml" + echo "" +fi + +echo "To run bbgo just type: " +echo "" +echo " ./bbgo run" +echo "" +echo "To stop bbgo, just hit CTRL-C" + +if [[ $osf == "darwin" ]] ; then + open -a TextEdit bbgo.yaml +fi diff --git a/scripts/setup-dnum.sh b/scripts/setup-dnum.sh new file mode 100644 index 0000000000..965705ac50 --- /dev/null +++ b/scripts/setup-dnum.sh @@ -0,0 +1,114 @@ +#!/bin/bash +set -e +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') +osf=$(uname | tr '[:upper:]' '[:lower:]') +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac +dist_file=bbgo-dnum-$version-$osf-$arch.tar.gz + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} + +function info() +{ + echo -e "${GREEN}$@${NC}" +} + +info "downloading..." +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-dnum-$osf-$arch bbgo +chmod +x bbgo +info "downloaded successfully" + +if [[ -e "bbgo.yaml" ]] ; then + echo "Found existing bbgo.yaml, you will overwrite the existing bbgo.yaml file!" + read -p "Are you sure? (Y/n) " a + if [[ $a == "n" ]] ; then + exit + fi +fi + +cat <<END > bbgo.yaml +--- +riskControls: + sessionBased: + max: + orderExecutor: + bySymbol: + BTCUSDT: + # basic risk control order executor + basic: + minQuoteBalance: 100.0 + maxBaseAssetBalance: 3.0 + minBaseAssetBalance: 0.0 + maxOrderAmount: 1000.0 + +exchangeStrategies: +- on: max + grid: + symbol: BTCUSDT + quantity: 0.002 + gridNumber: 100 + profitSpread: 50.0 + upperPrice: 14000.0 + lowerPrice: 11000.0 +END + +echo "Config file is generated" + +if [[ -e ".env.local" ]] ; then + echo "Found existing .env.local, you will overwrite the existing .env.local file!" + read -p "Are you sure? (Y/n) " a + if [[ $a == "n" ]] ; then + exit + fi +fi + +read -p "Enter your MAX API key: " api_key + +read -p "Enter your MAX API secret: " api_secret + +echo "Generating your .env.local file..."
+cat <<END > .env.local +export MAX_API_KEY=$api_key +export MAX_API_SECRET=$api_secret +END + +echo "Now you can edit your strategy config file bbgo.yaml to run bbgo" + +if [[ $osf == "darwin" ]] ; then + echo "We found you're using MacOS, you can type:" + echo "" + echo " open -a TextEdit bbgo.yaml" + echo "" +fi + +echo "To run bbgo just type: " +echo "" +echo " source .env.local && ./bbgo run --config bbgo.yaml" +echo "" +echo "To stop bbgo, just hit CTRL-C" + +if [[ $osf == "darwin" ]] ; then + open -a TextEdit bbgo.yaml +fi + diff --git a/scripts/setup-grid-dnum.sh b/scripts/setup-grid-dnum.sh new file mode 100644 index 0000000000..6e93bbc182 --- /dev/null +++ b/scripts/setup-grid-dnum.sh @@ -0,0 +1,121 @@ +#!/bin/bash +set -e + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} + +function info() +{ + echo -e "${GREEN}$@${NC}" +} +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') +osf=$(uname | tr '[:upper:]' '[:lower:]') +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac +dist_file=bbgo-dnum-$version-$osf-$arch.tar.gz +exchange=max + +if [[ -n $1 ]] ; then + exchange=$1 +fi + +exchange_upper=$(echo -n $exchange | tr 'a-z' 'A-Z') + + +info "downloading..." +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-dnum-$osf-$arch bbgo +chmod +x bbgo +info "downloaded successfully" + +function gen_dotenv() +{ + read -p "Enter your $exchange_upper API key: " api_key + read -p "Enter your $exchange_upper API secret: " api_secret + info "generating your .env.local file..." +cat <<END > .env.local +${exchange_upper}_API_KEY=$api_key +${exchange_upper}_API_SECRET=$api_secret +END + + info "dotenv is configured successfully" +} + +if [[ -e ".env.local" ]] ; then + warn "found an existing .env.local, you will overwrite the existing .env.local file!" + read -p "are you sure? (Y/n) " a + if [[ $a != "n" ]] ; then + gen_dotenv + fi +else + gen_dotenv +fi + + +if [[ -e "bbgo.yaml" ]] ; then + warn "found existing bbgo.yaml, you will overwrite the existing bbgo.yaml file!" + read -p "are you sure?
(Y/n) " a + if [[ $a == "n" ]] ; then + exit + fi +fi + +cat <<END > bbgo.yaml +--- +exchangeStrategies: +- on: ${exchange} + grid: + symbol: BTCUSDT + quantity: 0.001 + gridNumber: 100 + profitSpread: 100.0 + upperPrice: 50_000.0 + lowerPrice: 10_000.0 + long: true + +END + +info "config file is generated successfully" +echo "================================================================" +echo "now you can edit your strategy config file bbgo.yaml to run bbgo" + +if [[ $osf == "darwin" ]] ; then + echo "we found you're using MacOS, you can type:" + echo "" + echo " open -a TextEdit bbgo.yaml" + echo "" +else + echo "you look like a pro user, you can edit the config by:" + echo "" + echo " vim bbgo.yaml" + echo "" +fi + +echo "To run bbgo just type: " +echo "" +echo " ./bbgo run" +echo "" +echo "To stop bbgo, just hit CTRL-C" + +if [[ $osf == "darwin" ]] ; then + open -a TextEdit bbgo.yaml +fi diff --git a/scripts/setup-grid.sh b/scripts/setup-grid.sh index 2347f1f9a4..664c4bd5ed 100755 --- a/scripts/setup-grid.sh +++ b/scripts/setup-grid.sh @@ -1,27 +1,69 @@ #!/bin/bash +set -e + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} + +function info() +{ + echo -e "${GREEN}$@${NC}" +} + +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') osf=$(uname | tr '[:upper:]' '[:lower:]') -version=v1.6.0 +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac +dist_file=bbgo-$version-$osf-$arch.tar.gz +exchange=max + +if [[ -n $1 ]] ; then + exchange=$1 +fi + +exchange_upper=$(echo -n $exchange | tr 'a-z' 'A-Z') + -echo "Downloading bbgo" -curl -L -o bbgo https://github.com/c9s/bbgo/releases/download/$version/bbgo-$osf +info "downloading..." +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-$osf-$arch bbgo chmod +x bbgo -echo "Binary downloaded" +info "downloaded successfully" function gen_dotenv() { - read -p "Enter your MAX API key: " api_key - read -p "Enter your MAX API secret: " api_secret - echo "Generating your .env.local file..." + read -p "Enter your $exchange_upper API key: " api_key + read -p "Enter your $exchange_upper API secret: " api_secret + info "generating your .env.local file..." cat <<END > .env.local -export MAX_API_KEY=$api_key -export MAX_API_SECRET=$api_secret +${exchange_upper}_API_KEY=$api_key +${exchange_upper}_API_SECRET=$api_secret END + info "dotenv is configured successfully" } if [[ -e ".env.local" ]] ; then - echo "Found existing .env.local, you will overwrite the existing .env.local file!" - read -p "Are you sure? (Y/n) " a + warn "found an existing .env.local, you will overwrite the existing .env.local file!" + read -p "are you sure? (Y/n) " a if [[ $a != "n" ]] ; then gen_dotenv fi @@ -31,8 +73,8 @@ fi if [[ -e "bbgo.yaml" ]] ; then - echo "Found existing bbgo.yaml, you will overwrite the existing bbgo.yaml file!" - read -p "Are you sure? (Y/n) " a + warn "found existing bbgo.yaml, you will overwrite the existing bbgo.yaml file!" + read -p "are you sure?
(Y/n) " a if [[ $a == "n" ]] ; then exit fi @@ -40,44 +82,38 @@ fi cat <<END > bbgo.yaml --- -riskControls: - sessionBased: - max: - orderExecutor: - bySymbol: - BTCUSDT: - # basic risk control order executor - basic: - minQuoteBalance: 100.0 - maxBaseAssetBalance: 3.0 - minBaseAssetBalance: 0.0 - maxOrderAmount: 1000.0 - exchangeStrategies: -- on: max +- on: ${exchange} grid: symbol: BTCUSDT - quantity: 0.002 + quantity: 0.001 gridNumber: 100 - profitSpread: 50.0 - upperPrice: 14000.0 - lowerPrice: 11000.0 + profitSpread: 100.0 + upperPrice: 50_000.0 + lowerPrice: 10_000.0 + long: true + END -echo "Config file is generated" +info "config file is generated successfully" echo "================================================================" -echo "Now you can edit your strategy config file bbgo.yaml to run bbgo" +echo "now you can edit your strategy config file bbgo.yaml to run bbgo" if [[ $osf == "darwin" ]] ; then - echo "We found you're using MacOS, you can type:" + echo "we found you're using MacOS, you can type:" echo "" echo " open -a TextEdit bbgo.yaml" echo "" +else + echo "you look like a pro user, you can edit the config by:" + echo "" + echo " vim bbgo.yaml" + echo "" fi echo "To run bbgo just type: " echo "" -echo " source .env.local && ./bbgo run --config bbgo.yaml" +echo " ./bbgo run" echo "" echo "To stop bbgo, just hit CTRL-C" diff --git a/scripts/setup.sh b/scripts/setup.sh index b56b45de30..461a475e1c 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -1,11 +1,43 @@ #!/bin/bash +set -e +version=$(curl -fs https://api.github.com/repos/c9s/bbgo/releases/latest | awk -F '"' '/tag_name/{print $4}') osf=$(uname | tr '[:upper:]' '[:lower:]') -version=v1.6.0 +arch="" +case $(uname -m) in + x86_64 | ia64) arch="amd64";; + arm64 | aarch64 | arm) arch="arm64";; + *) + echo "unsupported architecture: $(uname -m)" + exit 1;; +esac +dist_file=bbgo-$version-$osf-$arch.tar.gz -echo "Downloading bbgo" -curl -L -o bbgo https://github.com/c9s/bbgo/releases/download/$version/bbgo-$osf +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function warn() +{ + echo -e "${YELLOW}$@${NC}" +} + +function error() +{ + echo -e "${RED}$@${NC}" +} + +function info() +{ + echo -e "${GREEN}$@${NC}" +} + +info "downloading..." +curl -O -L https://github.com/c9s/bbgo/releases/download/$version/$dist_file +tar xzf $dist_file +mv bbgo-$osf-$arch bbgo chmod +x bbgo -echo "Binary downloaded" +info "downloaded successfully" if [[ -e "bbgo.yaml" ]] ; then echo "Found existing bbgo.yaml, you will overwrite the existing bbgo.yaml file!"
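# The setup-*.sh and download scripts above all resolve the latest release tag the same way and
# then fetch a per-platform tarball. A minimal sketch of pinning a specific release instead of the
# latest (the tag below is a placeholder; substitute a real tag from the releases page):
#   version=v1.x.y
#   osf=$(uname | tr '[:upper:]' '[:lower:]')
#   curl -O -L https://github.com/c9s/bbgo/releases/download/$version/bbgo-$version-$osf-amd64.tar.gz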
diff --git a/scripts/test-mysql-migrations.sh b/scripts/test-mysql-migrations.sh new file mode 100755 index 0000000000..9f543bf686 --- /dev/null +++ b/scripts/test-mysql-migrations.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -e +rockhopper --config rockhopper_mysql.yaml up +rockhopper --config rockhopper_mysql.yaml down --to 1 diff --git a/scripts/test-sqlite3-migrations.sh b/scripts/test-sqlite3-migrations.sh new file mode 100755 index 0000000000..b101d0f69a --- /dev/null +++ b/scripts/test-sqlite3-migrations.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -e +rm -fv bbgo.sqlite3 +rockhopper --config rockhopper_sqlite.yaml up +rockhopper --config rockhopper_sqlite.yaml down --to 1 diff --git a/utils/binance-margin-stream/main.go b/utils/binance-margin-stream/main.go deleted file mode 100644 index 7768cdde49..0000000000 --- a/utils/binance-margin-stream/main.go +++ /dev/null @@ -1,35 +0,0 @@ -package main - -import ( - "context" - "os" - "syscall" - "time" - - log "github.com/sirupsen/logrus" - - "github.com/c9s/bbgo/pkg/cmd/cmdutil" - "github.com/c9s/bbgo/pkg/exchange/binance" -) - -func main() { - log.SetLevel(log.DebugLevel) - - ctx, cancel := context.WithCancel(context.Background()) - - // gobinance.NewClient(os.Getenv("BINANCE_API_KEY"), os.Getenv("BINANCE_API_SECRET")) - - ex := binance.New(os.Getenv("BINANCE_API_KEY"), os.Getenv("BINANCE_API_SECRET")) - ex.UseMargin(true) - stream := ex.NewStream() - - if err := stream.Connect(ctx); err != nil { - log.Fatal(err) - } - - cmdutil.WaitForSignal(ctx, syscall.SIGINT, syscall.SIGTERM) - cancel() - time.Sleep(5 * time.Second) - - return -} diff --git a/utils/changelog.sh b/utils/changelog.sh new file mode 100755 index 0000000000..727c082250 --- /dev/null +++ b/utils/changelog.sh @@ -0,0 +1,58 @@ +#!/bin/bash +# Generate a Markdown change log of pull requests from commits between two tags +# Author: Russell Heimlich +# URL: https://gist.github.com/kingkool68/09a201a35c83e43af08fcbacee5c315a + +# HOW TO USE +# Copy this script to a directory under Git version control +# Make the script executable i.e. chmod +x changelog.sh +# Run it! 
./changelog.sh +# Check CHANGELOG.md to see your results + +# Repo URL to base links off of +REPOSITORY_URL=https://github.com/c9s/bbgo + +# Get a list of all tags in reverse order +# Assumes the tags are in version format like v1.2.3 +GIT_TAGS=$(git tag -l --sort=-version:refname) + +# Make the tags an array +TAGS=($GIT_TAGS) +LATEST_TAG=${TAGS[0]} +PREVIOUS_TAG=${TAGS[1]} + +# If you want to specify your own two tags to compare, uncomment and enter them below +PREVIOUS_TAG=$LATEST_TAG +LATEST_TAG=main + +# Get a log of commits that occured between two tags +# We only get the commit hash so we don't have to deal with a bunch of ugly parsing +# See Pretty format placeholders at https://git-scm.com/docs/pretty-formats +COMMITS=$(git log $PREVIOUS_TAG..$LATEST_TAG --pretty=format:"%H") + +# Store our changelog in a variable to be saved to a file at the end +MARKDOWN="[Full Changelog]($REPOSITORY_URL/compare/$PREVIOUS_TAG...$LATEST_TAG)" +MARKDOWN+='\n' + +# Loop over each commit and look for merged pull requests +for COMMIT in $COMMITS; do + # Get the subject of the current commit + SUBJECT=$(git log -1 ${COMMIT} --pretty=format:"%s") + + # If the subject contains "Merge pull request #xxxxx" then it is deemed a pull request + PULL_REQUEST=$( grep -Eo "Merge pull request #[[:digit:]]+" <<< "$SUBJECT" ) + if [[ $PULL_REQUEST ]]; then + # Perform a substring operation so we're left with just the digits of the pull request + PULL_NUM=${PULL_REQUEST#"Merge pull request #"} + # AUTHOR_NAME=$(git log -1 ${COMMIT} --pretty=format:"%an") + # AUTHOR_EMAIL=$(git log -1 ${COMMIT} --pretty=format:"%ae") + + # Get the body of the commit + BODY=$(git log -1 ${COMMIT} --pretty=format:"%b") + MARKDOWN+='\n' + MARKDOWN+=" - [#$PULL_NUM]($REPOSITORY_URL/pull/$PULL_NUM): $BODY" + fi +done + +# Save our markdown to a file +echo -e $MARKDOWN diff --git a/utils/embed/main.go b/utils/embed/main.go new file mode 100644 index 0000000000..1d45db115b --- /dev/null +++ b/utils/embed/main.go @@ -0,0 +1,161 @@ +package main + +import ( + "bytes" + "flag" + "fmt" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + "text/template" +) + +var funcs = map[string]interface{}{ + "formatBytes": formatBytes, +} + +var ( + tmpl = template.Must(template.New("").Funcs(funcs).Parse(`{{- if .Tag -}} // +build {{ .Tag }} {{- end }} + +// Code generated by "embed"; DO NOT EDIT. 
+package {{ .Package }} + +import ( + "bytes" + "errors" + "net/http" + "os" + "time" +) + +var assets = map[string][]byte{} + +var FS = &fs{} + +type fs struct {} + +func (fs *fs) Open(name string) (http.File, error) { + if name == "/" { + return fs, nil; + } + b, ok := assets[name] + if !ok { + return nil, os.ErrNotExist + } + return &file{name: name, size: len(b), Reader: bytes.NewReader(b)}, nil +} + +func (fs *fs) Close() error { return nil } +func (fs *fs) Read(p []byte) (int, error) { return 0, nil } +func (fs *fs) Seek(offset int64, whence int) (int64, error) { return 0, nil } +func (fs *fs) Stat() (os.FileInfo, error) { return fs, nil } +func (fs *fs) Name() string { return "/" } +func (fs *fs) Size() int64 { return 0 } +func (fs *fs) Mode() os.FileMode { return 0755} +func (fs *fs) ModTime() time.Time{ return time.Time{} } +func (fs *fs) IsDir() bool { return true } +func (fs *fs) Sys() interface{} { return nil } +func (fs *fs) Readdir(count int) ([]os.FileInfo, error) { + files := []os.FileInfo{} + for name, data := range assets { + files = append(files, &file{name: name, size: len(data), Reader: bytes.NewReader(data)}) + } + return files, nil +} + +type file struct { + name string + size int + *bytes.Reader +} + +func (f *file) Close() error { return nil } +func (f *file) Readdir(count int) ([]os.FileInfo, error) { return nil, errors.New("readdir is not supported") } +func (f *file) Stat() (os.FileInfo, error) { return f, nil } +func (f *file) Name() string { return f.name } +func (f *file) Size() int64 { return int64(f.size) } +func (f *file) Mode() os.FileMode { return 0644 } +func (f *file) ModTime() time.Time{ return time.Time{} } +func (f *file) IsDir() bool { return false } +func (f *file) Sys() interface{} { return nil } + +`)) +) + +// Embed is a helper function that embeds assets from the given directories +// into a Go source file. It is designed to be called from some generator +// script, see example project to find out how it can be used. 
+func Embed(file string, dirs ...string) error { + var buf bytes.Buffer + + // execute template + if err := tmpl.Execute(&buf, struct { + Package string + Tag string + }{ + Package: packageName, + Tag: tag, + }); err != nil { + return err + } + + w, err := os.Create(file) + if err != nil { + return err + } + + defer w.Close() + + fmt.Fprintln(w, buf.String()) + fmt.Fprintln(w, `func init() {`) + + for _, dir := range dirs { + filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if info.IsDir() { + return nil + } + + log.Printf("packing %s...", path) + + b, err := ioutil.ReadFile(path) + if err != nil { + return err + } + + path = filepath.ToSlash(path) + fmt.Fprintf(w, ` assets[%q] = []byte{`, strings.TrimPrefix(path, dir)) + fmt.Fprintf(w, formatBytes(b)) + fmt.Fprintln(w, `}`) + return nil + }) + } + + fmt.Fprintln(w, `}`) + + return nil +} + +func formatBytes(s []byte) string { + var builder strings.Builder + for _, v := range s { + builder.WriteString(fmt.Sprintf("0x%02x, ", int(v))) + } + return builder.String() +} + +var packageName string +var outputFile string +var tag string + +func main() { + flag.StringVar(&packageName, "package", "", "package name") + flag.StringVar(&tag, "tag", "", "build tag in the generated file") + flag.StringVar(&outputFile, "output", "assets.go", "output filename") + flag.Parse() + args := flag.Args() + if err := Embed(outputFile, args...); err != nil { + log.Fatal(err) + } +} diff --git a/utils/generate-new-migration.sh b/utils/generate-new-migration.sh new file mode 100755 index 0000000000..75bd8e6101 --- /dev/null +++ b/utils/generate-new-migration.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -e +rockhopper --config rockhopper_sqlite.yaml create --type sql $1 +rockhopper --config rockhopper_mysql.yaml create --type sql $1 diff --git a/utils/generate-version-file.sh b/utils/generate-version-file.sh new file mode 100755 index 0000000000..f2a1a3ed90 --- /dev/null +++ b/utils/generate-version-file.sh @@ -0,0 +1,29 @@ +#!/bin/bash +PACKAGE_NAME=version +REF=$(git show -s --format=%h -1) + +if [[ -z $VERSION ]] ; then + VERSION=$(git describe --tags) +fi + +VERSION=$VERSION-$REF + +if [[ -z $BUILD_FLAGS ]] ; then + BUILD_FLAGS=release +fi + + +if [[ -n $VERSION_SUFFIX ]] ; then + VERSION=${VERSION}${VERSION_SUFFIX} +fi + +cat <