Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[DO NOT MERGE] Test RC provider packages for https://github.com/apache/airflow/issues/37890 #2124

Closed
wants to merge 5 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/ci-python-sdk.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ env:
SFTP_PASSWORD: ${{ secrets.SFTP_PASSWORD }}
AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS: True
AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES: "airflow.* astro.*"
AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
AIRFLOW_VAR_FOO: templated_file_name
Expand Down
2 changes: 1 addition & 1 deletion python-sdk/example_dags/example_dataframe_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def find_worst_covid_month(dfs: List[pd.DataFrame]):
"""
res = {}
for covid_month_data in dfs:
if ALLOWED_DESERIALIZATION_CLASSES == "airflow\\.* astro\\.*":
if ALLOWED_DESERIALIZATION_CLASSES == "airflow.* astro.*":
covid_month = datetime.fromtimestamp(covid_month_data.Date_YMD.iloc[0] / 1e3).strftime("%Y-%m")
else:
covid_month = covid_month_data.Date_YMD.iloc[0].__format__("%Y-%m")
Expand Down
4 changes: 2 additions & 2 deletions python-sdk/noxfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def test(session: nox.Session, airflow) -> None:
"""Run both unit and integration tests."""
env = {
"AIRFLOW_HOME": f"~/airflow-{airflow}-python-{session.python}",
"AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES": "airflow\\.* astro\\.*",
"AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES": "airflow.* astro.*",
}

session.install(f"apache-airflow~={airflow}")
Expand Down Expand Up @@ -73,7 +73,7 @@ def test_examples_by_dependency(session: nox.Session, extras):

env = {
"AIRFLOW_HOME": "~/airflow-latest-python-latest",
"AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES": "airflow\\.* astro\\.*",
"AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES": "airflow.* astro.*",
}

session.install("-e", f".[{pypi_deps}]")
Expand Down
26 changes: 13 additions & 13 deletions python-sdk/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ dependencies = [
"pandas<2.2.0", # Version 2.2.0 seems to be breaking our tests, so pin it to this version temporarily.
"pyarrow",
"python-frontmatter",
"smart-open",
"smart-open<7.0.0",
"SQLAlchemy>=1.3.18",
"Flask-Session<0.6.0" # This release is breaking our tests, let's pin it as a temporary workaround
]
Expand Down Expand Up @@ -55,7 +55,7 @@ tests = [
]
google = [
"protobuf",
"apache-airflow-providers-google>=6.4.0",
"apache-airflow-providers-google==10.16.0rc1",
"sqlalchemy-bigquery>=1.3.0",
"smart-open[gcs]>=5.2.1"
]
Expand All @@ -65,15 +65,15 @@ snowflake = [
"snowflake-connector-python[pandas]"
]
postgres = [
"apache-airflow-providers-postgres",
"apache-airflow-providers-postgres==5.10.2rc1",
]
amazon = [
"apache-airflow-providers-amazon>=5.0.0",
"apache-airflow-providers-amazon==8.19.0rc1",
"s3fs",
"smart-open[s3]>=5.2.1",
]
azure = [
"apache-airflow-providers-microsoft-azure",
"apache-airflow-providers-microsoft-azure==9.0.1rc1",
"azure-storage-blob",
"smart-open[azure]>=5.2.1",
]
Expand All @@ -86,7 +86,7 @@ ftp = [
"smart-open>=5.2.1",
]
openlineage = [
"apache-airflow-providers-openlineage>=1.4.0",
"apache-airflow-providers-openlineage==1.6.0rc1",
]

databricks = [
Expand All @@ -101,18 +101,18 @@ mssql = [
]

mysql = [
"apache-airflow-providers-mysql",
"apache-airflow-providers-mysql==5.5.4rc1",
]

duckdb = [
"airflow-provider-duckdb>=0.0.2",
]

all = [
"apache-airflow-providers-amazon",
"apache-airflow-providers-google>=6.4.0",
"apache-airflow-providers-amazon==8.19.0rc1",
"apache-airflow-providers-google==10.16.0rc1",
"apache-airflow-providers-ftp",
"apache-airflow-providers-postgres",
"apache-airflow-providers-postgres==5.10.2rc1",
"apache-airflow-providers-snowflake",
"apache-airflow-providers-sftp",
"smart-open[all]>=5.2.1",
Expand All @@ -125,12 +125,12 @@ all = [
"databricks-sql-connector<2.9.0",
"s3fs",
"protobuf",
"apache-airflow-providers-openlineage>=1.4.0",
"apache-airflow-providers-microsoft-azure",
"apache-airflow-providers-openlineage==1.6.0rc1",
"apache-airflow-providers-microsoft-azure==9.0.1rc1",
"azure-storage-blob",
"apache-airflow-providers-microsoft-mssql>=3.2",
"airflow-provider-duckdb>=0.0.2",
"apache-airflow-providers-mysql"
"apache-airflow-providers-mysql==5.5.4rc1",
]
doc = [
"myst-parser>=0.17",
Expand Down
Loading