Skip to content

Commit 87dfe13

Browse files
authored
Merge branch 'main' into feature/aqua_ms_changes_2
2 parents c58b2f5 + 277cb8e commit 87dfe13

File tree

5 files changed

+34
-36
lines changed

5 files changed

+34
-36
lines changed

.github/workflows/run-unittests-py39-py310.yml renamed to .github/workflows/run-unittests-py310-py311.yml

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
name: "[Py3.9-3.11] - All Unit Tests"
1+
name: "[Py3.10-3.11] - All Unit Tests"
22

33
on:
44
workflow_dispatch:
@@ -33,15 +33,14 @@ jobs:
3333
strategy:
3434
fail-fast: false
3535
matrix:
36-
python-version: ["3.9", "3.10", "3.11"]
36+
python-version: ["3.10", "3.11"]
3737
name: ["unitary", "slow_tests"]
3838
include:
3939
- name: "unitary"
4040
test-path: "tests/unitary"
4141
# `model` tests running in "slow_tests",
4242
# `feature_store` tests has its own test suite
43-
# `forecast` tests not supported in python 3.9,3.10 (automlx dependency). Tests are running in python3.8 test env, see run-unittests-py38-cov-report.yml
44-
# 'pii' tests run only with py3.8, 'datapane' library conflicts with pandas>2.2.0, which used in py3.9/3.10 setup
43+
# `forecast` tests not run in this suite
4544
# 'hpo' tests hangs if run together with all unitary tests. Tests running in separate command before running all unitary
4645
ignore-path: |
4746
--ignore tests/unitary/with_extras/model \

.github/workflows/run-unittests-py38-cov-report.yml renamed to .github/workflows/run-unittests-py39-cov-report.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
name: "[Py3.8][COV REPORT] - All Unit Tests"
1+
name: "[Py3.9][COV REPORT] - All Unit Tests"
22

33
on:
44
workflow_dispatch:
@@ -26,7 +26,7 @@ env:
2626

2727
jobs:
2828
test:
29-
name: python 3.8, ${{ matrix.name }}
29+
name: python 3.9, ${{ matrix.name }}
3030
runs-on: ubuntu-latest
3131
timeout-minutes: 90
3232

@@ -58,7 +58,7 @@ jobs:
5858

5959
- uses: actions/setup-python@v5
6060
with:
61-
python-version: "3.8"
61+
python-version: "3.9"
6262
cache: "pip"
6363
cache-dependency-path: |
6464
pyproject.toml
@@ -71,7 +71,7 @@ jobs:
7171
name: "Test env setup"
7272
timeout-minutes: 30
7373

74-
# Installing pii deps for python3.8 test setup only, it will not work with python3.9/3.10, because
74+
# Installing pii deps for python3.9 test setup only, it will not work with python3.10/3.11, because
7575
# 'datapane' library conflicts with pandas>2.2.0, which is used in py3.9/3.10 setup
7676
- name: "Install PII dependencies"
7777
run: |

ads/opctl/operator/lowcode/forecast/model/automlx.py

Lines changed: 21 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -249,17 +249,18 @@ def _generate_report(self):
249249
self.explain_model()
250250

251251
global_explanation_section = None
252-
if self.spec.explanations_accuracy_mode != SpeedAccuracyMode.AUTOMLX:
253-
# Convert the global explanation data to a DataFrame
254-
global_explanation_df = pd.DataFrame(self.global_explanation)
255252

256-
self.formatted_global_explanation = (
257-
global_explanation_df / global_explanation_df.sum(axis=0) * 100
258-
)
259-
self.formatted_global_explanation = self.formatted_global_explanation.rename(
260-
{self.spec.datetime_column.name: ForecastOutputColumns.DATE},
261-
axis=1,
262-
)
253+
# Convert the global explanation data to a DataFrame
254+
global_explanation_df = pd.DataFrame(self.global_explanation)
255+
256+
self.formatted_global_explanation = (
257+
global_explanation_df / global_explanation_df.sum(axis=0) * 100
258+
)
259+
260+
self.formatted_global_explanation.rename(
261+
columns={self.spec.datetime_column.name: ForecastOutputColumns.DATE},
262+
inplace=True,
263+
)
263264

264265
aggregate_local_explanations = pd.DataFrame()
265266
for s_id, local_ex_df in self.local_explanation.items():
@@ -428,7 +429,9 @@ def explain_model(self):
428429
# Use the MLExplainer class from AutoMLx to generate explanations
429430
explainer = automlx.MLExplainer(
430431
self.models[s_id]["model"],
431-
self.datasets.additional_data.get_data_for_series(series_id=s_id)
432+
self.datasets.additional_data.get_data_for_series(
433+
series_id=s_id
434+
)
432435
.drop(self.spec.datetime_column.name, axis=1)
433436
.head(-self.spec.horizon)
434437
if self.spec.additional_data
@@ -463,6 +466,13 @@ def explain_model(self):
463466

464467
# Store the explanations in the local_explanation dictionary
465468
self.local_explanation[s_id] = explanations_df
469+
470+
self.global_explanation[s_id] = dict(
471+
zip(
472+
self.local_explanation[s_id].columns,
473+
np.nanmean((self.local_explanation[s_id]), axis=0),
474+
)
475+
)
466476
else:
467477
# Fall back to the default explanation generation method
468478
super().explain_model()

ads/opctl/operator/lowcode/forecast/model/base_model.py

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -503,6 +503,7 @@ def _save_report(
503503
f2.write(f1.read())
504504

505505
# forecast csv report
506+
# todo: add test data into forecast.csv
506507
# if self.spec.test_data is not None:
507508
# test_data_dict = test_data.get_dict_by_series()
508509
# for series_id, test_data_values in test_data_dict.items():
@@ -772,14 +773,6 @@ def explain_model(self):
772773
logger.warn(
773774
"No explanations generated. Ensure that additional data has been provided."
774775
)
775-
elif (
776-
self.spec.model == SupportedModels.AutoMLX
777-
and self.spec.explanations_accuracy_mode
778-
== SpeedAccuracyMode.AUTOMLX
779-
):
780-
logger.warning(
781-
"Global explanations not available for AutoMLX models with inherent explainability"
782-
)
783776
else:
784777
self.global_explanation[s_id] = dict(
785778
zip(

pyproject.toml

Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@ classifiers = [
4545
"Intended Audience :: Developers",
4646
"License :: OSI Approved :: Universal Permissive License (UPL)",
4747
"Operating System :: OS Independent",
48-
"Programming Language :: Python :: 3.8",
4948
"Programming Language :: Python :: 3.9",
5049
"Programming Language :: Python :: 3.10",
5150
"Programming Language :: Python :: 3.11",
@@ -66,8 +65,7 @@ dependencies = [
6665
"numpy>=1.19.2,<2.0.0",
6766
"oci>=2.144.1",
6867
"ocifs>=1.1.3",
69-
"pandas>1.2.1; python_version<'3.9'", # starting pandas v2.1.0 requires-python = '>=3.9'
70-
"pandas>=2.2.0; python_version>='3.9'",
68+
"pandas>=2.2.0",
7169
"psutil>=5.7.2",
7270
"python_jsonschema_objects>=0.3.13",
7371
"requests",
@@ -145,7 +143,7 @@ torch = [
145143
"torchvision"
146144
]
147145
viz = [
148-
"bokeh>=3.0.0,<3.2.0", # starting 3.2.0 bokeh not supporting python3.8; relax after ADS will drop py3.8 support
146+
"bokeh",
149147
"folium>=0.12.1",
150148
"graphviz<0.17",
151149
"scipy>=1.5.4",
@@ -203,7 +201,7 @@ pii = [
203201
"scrubadub_spacy",
204202
"spacy-transformers==1.2.5",
205203
"spacy==3.6.1",
206-
"report-creator>=1.0.32",
204+
"report-creator>=1.0.37",
207205
]
208206
llm = ["langchain>=0.2", "langchain-community", "langchain_openai", "pydantic>=2,<3", "evaluate>=0.4.0"]
209207
aqua = ["jupyter_server"]
@@ -227,10 +225,8 @@ testsuite = [
227225
"pdfplumber",
228226
"py4j",
229227
"pyarrow>=15.0.0",
230-
"statsmodels; python_version=='3.8'",
231-
"statsmodels>=0.14.1; python_version>='3.9'", # cython3.0 compatibility added in v0.14.1
232-
"tables",
233-
"tables>3.9.0; python_version>='3.9'",
228+
"statsmodels>=0.14.1",
229+
"tables>3.9.0",
234230
"xlrd>=1.2.0",
235231
]
236232

0 commit comments

Comments (0)