MAINT: Numpy 2.0 compatibility (#264)
* version updates and test fixes
* black version update
* add build_wheels tag input
* fix up/download_artifactsv4 errors
* limit py39 > numpy1
* skip test_summaries_after_fillna for mac os
* skip tests on arm64
* no cp39 arm64 wheels
stefan-jansen authored Sep 26, 2024
1 parent e945ae1 commit f5abd98
Showing 100 changed files with 440 additions and 284 deletions.
70 changes: 38 additions & 32 deletions .github/workflows/build_wheels.yml
@@ -3,11 +3,17 @@ name: PyPI
on:
workflow_dispatch:
inputs:
publish_to_pypi:
description: 'Publish to PyPI?'
target:
type: choice
description: 'Package Index'
required: true
type: boolean
default: false
default: 'TESTPYPI'
options: [ 'TESTPYPI', 'PYPI' ]
version:
type: string
description: 'Version tag'
required: true
default: '3.1'

jobs:
build_wheels:
@@ -17,19 +23,16 @@ jobs:
fail-fast: false
matrix:
os: [ ubuntu-latest , windows-latest, macos-latest ]
python: [ "cp38", "cp39", "cp310", "cp311" ]
python: [ "cp39", "cp310", "cp311", "cp312" ]
arch: [ auto64 ]

steps:
- name: Checkout zipline
uses: actions/checkout@v4
with:
fetch-depth: 0

# - name: Setup Python
# uses: actions/setup-python@v5
# with:
# python-version: ${{ matrix.python }}
fetch-tags: true
ref: ${{ github.event.inputs.version }}

- name: Set Xcode version
uses: maxim-lobanov/setup-xcode@v1
@@ -41,8 +44,6 @@
if: runner.os != 'Windows'
uses: pypa/[email protected]
env:
CIBW_BEFORE_ALL_LINUX: ./tools/install_talib.sh
CIBW_BEFORE_ALL_MACOS: brew install ta-lib
CIBW_ARCHS_LINUX: ${{ matrix.arch }}
CIBW_ARCHS_MACOS: x86_64 arm64
CIBW_BUILD: "${{ matrix.python }}-*"
@@ -59,58 +60,63 @@
uses: pypa/[email protected]
env:
CIBW_BUILD: "${{ matrix.python }}-win_amd64"
CIBW_BEFORE_TEST_WINDOWS: >
call "c:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" amd64 &&
call ./tools/install_talib.bat

- name: Store artifacts
uses: actions/upload-artifact@v4
with:
name: my-artifact-${{ matrix.os }}-${{ matrix.python }}-${{ matrix.arch }}.whl
path: ./wheelhouse/*.whl


build_sdist:
name: Build source distribution
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
fetch-tags: true
ref: ${{ github.event.inputs.version }}

- uses: actions/setup-python@v5
name: Install Python
with:
python-version: '3.11'

- name: Build sdist
run: |
pip install -U pip setuptools build
python -m build --sdist
- uses: actions/upload-artifact@v4
- name: upload sdist
uses: actions/upload-artifact@v4
with:
path: dist/*.tar.gz
name: my-artifact-sdist
path: ./dist/*.tar.gz

upload_pypi:
needs: [ build_wheels, build_sdist ]
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v4
- name: Download All Artifacts
uses: actions/download-artifact@v4
with:
name: artifact
path: dist

- name: publish to testpypi
pattern: my-artifact-*
path: artifacts/
merge-multiple: true
- name: Display structure of downloaded files
run: ls -R artifacts
- name: Publish to PyPI
if: ${{ github.event.inputs.target == 'PYPI' }}
uses: pypa/gh-action-pypi-publish@release/v1
if: ${{ inputs.publish_to_pypi == false }}
with:
user: __token__
password: ${{ secrets.TESTPYPI_TOKEN }}
repository_url: https://test.pypi.org/legacy/

- name: publish to pypi
password: ${{ secrets.PYPI_TOKEN }}
packages-dir: artifacts/
- name: Publish to PyPI - Test
if: ${{ github.event.inputs.target == 'TESTPYPI' }}
uses: pypa/gh-action-pypi-publish@release/v1
if: ${{ inputs.publish_to_pypi == true }}
with:
user: __token__
password: ${{ secrets.PYPI_TOKEN }}
password: ${{ secrets.TESTPYPI_TOKEN }}
repository-url: https://test.pypi.org/legacy/
skip-existing: true
verbose: true
packages-dir: artifacts/
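With the old `publish_to_pypi` boolean replaced by the `target` choice and a `version` tag input, a release is now dispatched manually against a specific tag and lands on TestPyPI by default. A minimal sketch of triggering the reworked workflow from the command line, assuming the GitHub CLI (`gh`) is installed and authenticated for this repository:

```bash
# Build wheels and the sdist from the 3.1 tag and upload to TestPyPI first
gh workflow run build_wheels.yml -f target=TESTPYPI -f version=3.1

# Re-run against the live index once the TestPyPI upload looks good
gh workflow run build_wheels.yml -f target=PYPI -f version=3.1
```

Keeping `TESTPYPI` as the default makes an accidental production upload less likely.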
2 changes: 1 addition & 1 deletion .github/workflows/ci_tests_full.yml
@@ -18,7 +18,7 @@ jobs:
with:
options: "--check --diff"
src: "./src ./tests"
version: "~=22.0"
version: '24.1'

flake8-lint:
name: Lint Check
4 changes: 3 additions & 1 deletion .github/workflows/ci_tests_quick.yml
@@ -3,6 +3,8 @@ name: CI Tests - Quick
on:
workflow_dispatch:
push:
branches:
- main
pull_request:
branches:
- main
@@ -17,7 +19,7 @@ jobs:
with:
options: "--check --diff"
src: "./src ./tests"
version: "~=22.0"
version: '24.1'

flake8-lint:
name: Lint Check
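Both CI workflow files above now pin the black action to version 24.1 rather than the old `~=22.0` range. To run the same formatting check locally before pushing (a sketch, assuming pip is available in the active environment):

```bash
pip install "black==24.1.0"
black --check --diff ./src ./tests
```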
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
@@ -1,5 +1,5 @@
default_language_version:
python: python3.11
python: python3.12
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: 'v4.4.0'
@@ -8,12 +8,12 @@ repos:
- id: check-merge-conflict
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/ambv/black
rev: 23.7.0
- repo: https://github.com/psf/black
rev: 24.1.0
hooks:
- id: black
additional_dependencies: ['click==8.0.4']
language_version: python3.11
language_version: python3.12
- repo: https://github.com/PyCQA/flake8
rev: '6.0.0'
hooks:
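The pre-commit config now builds its hook environments with Python 3.12 and pulls black 24.1.0 from the `psf/black` repository. To refresh the cached hook environments and re-run everything locally (a sketch, assuming pre-commit is installed):

```bash
pre-commit clean              # drop environments built against the old revisions
pre-commit run --all-files    # re-install the hooks and run them on the whole tree
```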
20 changes: 16 additions & 4 deletions README.md
@@ -23,13 +23,15 @@ by [Stefan Jansen](https://www.linkedin.com/in/applied-ai/) who is trying to kee
- **PyData Integration:** Input of historical data and output of performance statistics are based on Pandas DataFrames to integrate nicely into the existing PyData ecosystem.
- **Statistics and Machine Learning Libraries:** You can use libraries like matplotlib, scipy, statsmodels, and scikit-learn to support development, analysis, and visualization of state-of-the-art trading systems.

> **Note:** Release 3.05 makes Zipline compatible with Numpy 2.0, which requires Pandas 2.2.2 or higher. If you are using an older version of Pandas, you will need to upgrade it. Other packages may also need more time to catch up with the latest Numpy release.
> **Note:** Release 3.0 updates Zipline to use [pandas](https://pandas.pydata.org/pandas-docs/stable/whatsnew/v2.0.0.html) >= 2.0 and [SQLAlchemy](https://docs.sqlalchemy.org/en/20/) > 2.0. These are major version updates that may break existing code; please review the linked docs.
> **Note:** Release 2.4 updates Zipline to use [exchange_calendars](https://github.com/gerrymanoim/exchange_calendars) >= 4.2. This is a major version update and may break existing code (which we have tried to avoid but cannot guarantee). Please review the changes [here](https://github.com/gerrymanoim/exchange_calendars/issues/61).
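In practice, the Numpy 2.0 note above amounts to upgrading the stack together. A minimal sketch for a pip-managed environment:

```bash
python -m pip install --upgrade "numpy>=2.0" "pandas>=2.2.2" zipline-reloaded
python -c "import numpy, pandas; print(numpy.__version__, pandas.__version__)"
```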
## Installation

Zipline supports Python >= 3.8 and is compatible with current versions of the relevant [NumFOCUS](https://numfocus.org/sponsored-projects?_sft_project_category=python-interface) libraries, including [pandas](https://pandas.pydata.org/) and [scikit-learn](https://scikit-learn.org/stable/index.html).
Zipline supports Python >= 3.9 and is compatible with current versions of the relevant [NumFOCUS](https://numfocus.org/sponsored-projects?_sft_project_category=python-interface) libraries, including [pandas](https://pandas.pydata.org/) and [scikit-learn](https://scikit-learn.org/stable/index.html).

### Using `pip`

@@ -94,15 +96,25 @@ def handle_data(context, data):
long_mavg=long_mavg)
```

You can then run this algorithm using the Zipline CLI. But first, you need to download some market data with historical prices and trading volumes:
You can then run this algorithm using the Zipline CLI. But first, you need to download some market data with historical prices and trading volumes.

This will download asset pricing data from [NASDAQ](https://data.nasdaq.com/databases/WIKIP) (formerly [Quandl](https://www.nasdaq.com/about/press-center/nasdaq-acquires-quandl-advance-use-alternative-data)).

> This requires an API key, which you can get for free by signing up at [NASDAQ Data Link](https://data.nasdaq.com).
```bash
$ export QUANDL_API_KEY="your_key_here"
$ zipline ingest -b quandl
```

The following command will:
- stream the data through the algorithm over the specified time range.
- save the resulting performance DataFrame as `dma.pickle`, which you can load and analyze from Python using, e.g., [pyfolio-reloaded](https://github.com/stefan-jansen/pyfolio-reloaded).

```bash
$ zipline run -f dual_moving_average.py --start 2014-1-1 --end 2018-1-1 -o dma.pickle --no-benchmark
```

This will download asset pricing data sourced from [Quandl](https://www.quandl.com/databases/WIKIP/documentation?anchor=companies) (since [acquisition](https://www.nasdaq.com/about/press-center/nasdaq-acquires-quandl-advance-use-alternative-data) hosted by NASDAQ), and stream it through the algorithm over the specified time range. Then, the resulting performance DataFrame is saved as `dma.pickle`, which you can load and analyze from Python.

You can find other examples in the [zipline/examples](https://github.com/stefan-jansen/zipline-reloaded/tree/main/src/zipline/examples) directory.

## Questions, suggestions, bugs?
53 changes: 37 additions & 16 deletions pyproject.toml
@@ -31,8 +31,12 @@ classifiers = [

license = { file = "LICENSE" }

requires-python = '>=3.8'
requires-python = '>=3.9'
dependencies = [
"numpy>=1.23.5, <2; python_version<'3.10'",
"numpy>=1.23.5; python_version<'3.12'",
"numpy>=1.26.0; python_version>='3.12'",
"pandas >=1.3.0,<3.0",
'alembic >=0.7.7',
'bcolz-zipline >=1.2.6',
'bottleneck >=1.0.0',
@@ -46,8 +50,6 @@ dependencies = [
'multipledispatch >=0.6.0',
'networkx >=2.0',
'numexpr >=2.6.1',
'numpy >=1.14.5,<2.0.0',
'pandas >=1.3',
'patsy >=0.4.0',
'python-dateutil >=2.4.2',
'python-interface >=1.5.3',
@@ -57,7 +59,8 @@ dependencies = [
'six >=1.10.0',
'sqlalchemy >=2',
'statsmodels >=0.6.1',
'ta-lib >=0.4.09',
# ta-lib is not available for Numpy 2.0 => optional
# 'ta-lib >=0.4.09',
'tables >=3.4.3',
'toolz >=0.8.2',
'exchange-calendars >=4.2.4'
@@ -75,7 +78,9 @@ requires = [
'wheel>=0.36.0',
'Cython>=0.29.21',
# 'Cython>=3',
'oldest-supported-numpy; python_version>="3.8"',
'numpy>=2.0.0rc1 ; python_version>"3.9"',
'numpy<2.0 ; python_version<="3.9"'
# 'oldest-supported-numpy; python_version>="3.9"',
]
build-backend = 'setuptools.build_meta'
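The NumPy pins above are selected with `python_version` environment markers, both at install time (in `dependencies`) and at build time (in `build-system.requires`): Python 3.9 stays on NumPy 1.x while 3.10+ builds against NumPy 2. A quick way to check which marker applies on the current interpreter (a sketch, assuming the standalone `packaging` library is installed):

```bash
python - <<'EOF'
from packaging.markers import Marker

# True on Python 3.9, where the "numpy>=1.23.5, <2" pin applies
print(Marker("python_version < '3.10'").evaluate())

# True on Python 3.12+, where "numpy>=1.26.0" kicks in
print(Marker("python_version >= '3.12'").evaluate())
EOF
```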

@@ -92,20 +97,20 @@ test = [
'matplotlib >=1.5.3',
'responses >=0.9.0',
'pandas-datareader >=0.2.1',
'click <8.1.0',
# 'click <8.1.0',
'click',
'coverage',
'pytest-rerunfailures',
# the following are required to run tests using PostgreSQL instead of SQLite
# 'psycopg2',
# 'pytest-postgresql ==3.1.3'
]

dev = [
'flake8 >=3.9.1',
'black',
'black >=24.0b1',
'pre-commit >=2.12.1',
# 'Cython>=0.29.21,<3',
'Cython>=0.29.21',
'ruff'
]

docs = [
'Cython',
'Sphinx >=1.3.2',
@@ -114,7 +119,6 @@ docs = [
'pydata-sphinx-theme',
'sphinx_markdown_tables',
'm2r2'

]

[project.scripts]
@@ -137,19 +141,21 @@ local_scheme = 'dirty-tag'
"*" = ["*.pyi", "*.pyx", "*.pxi", "*.pxd"]

[tool.pytest.ini_options]
pythonpath = ['.']
testpaths = 'tests'
addopts = '-v'
filterwarnings = 'ignore::DeprecationWarning:pandas_datareader.compat'

[tool.cibuildwheel]
test-extras = "test"
test-command = "pytest -x --reruns 5 {package}/tests"
test-command = "pytest -x --reruns 5 {project}/tests"
build-verbosity = 3
environment = "GITHUB_ACTIONS=true"
skip = "cp39-macosx_arm64 cp39-macosx_universal2:arm64"

[tool.cibuildwheel.macos]
archs = ["x86_64", "arm64", "universal2"]
test-skip = ["*universal2:arm64"]
test-skip = "cp39* *-macosx_arm64 *-macosx_universal2:arm64"

[tool.cibuildwheel.linux]
archs = ["auto64"]
@@ -161,7 +167,7 @@ test-skip = ["*"]

[tool.black]
line-length = 88
target-version = ['py38', 'py39', 'py310', 'py311']
target-version = ['py39', 'py310', 'py311', 'py312']
exclude = '''
(
asv_bench/env
@@ -175,10 +181,22 @@ exclude = '''
)
'''

[tool.ruff.lint]
select = ["NPY201"]

[tool.tox]
legacy_tox_ini = """
[tox]
envlist = py{39,310}-pandas{13,14,15}, py{39,310,311,312}-pandas{20,21,22}
envlist =
py39-pandas{13,14,15}-numpy1
py310-pandas{13,14,15,20,21,22}-numpy1
py311-pandas{13,14,15,20,21,22}-numpy1
py312-pandas{13,14,15,20,21,22}-numpy1
py39-pandas222-numpy2
py310-pandas222-numpy2
py311-pandas222-numpy2
py312-pandas222-numpy2
isolated_build = True
skip_missing_interpreters = True
minversion = 3.23.0
@@ -204,6 +222,9 @@ deps =
pandas20: pandas>=2.0,<2.1
pandas21: pandas>=2.1,<2.2
pandas22: pandas>=2.2,<2.3
pandas222: pandas>=2.2.2,<2.3
numpy1: numpy>=1.23.5,<2.0
numpy2: numpy>=2.0,<2.1
commands =
pytest -n 4 --reruns 5 --cov={toxinidir}/src --cov-report term --cov-report=xml --cov-report=html:htmlcov {toxinidir}/tests
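The tox matrix now crosses pandas versions with explicit `numpy1`/`numpy2` factors, so the NumPy 1.x and 2.x stacks are tested side by side. A sketch of exercising one environment from each family locally, assuming tox (>= 3.23, per `minversion`) is installed:

```bash
# legacy stack: Python 3.9, pandas 1.5.x, NumPy < 2
tox -e py39-pandas15-numpy1

# new stack: Python 3.12, pandas >= 2.2.2, NumPy 2.x
tox -e py312-pandas222-numpy2
```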
2 changes: 1 addition & 1 deletion src/zipline/_protocol.pxd
@@ -1,5 +1,5 @@
cimport numpy as np

import numpy; numpy.import_array()
from zipline.assets._assets cimport Asset


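Cython modules that `cimport numpy` are expected to call `numpy.import_array()` to initialize the NumPy C API, which is what the added line does; it helps avoid C-API initialization errors when the extension is built against NumPy 2.0. A quick smoke test that the rebuilt extension loads against the installed NumPy (a sketch, assuming a built installation of zipline-reloaded):

```bash
# verify the compiled module imports cleanly against the installed NumPy
python -c "import numpy, zipline._protocol; print('numpy', numpy.__version__)"
```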