Compare commits

...

627 Commits

Author SHA1 Message Date
github-actions[bot]
f3dee40d08 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-02-15 04:56:37 +00:00
github-actions[bot]
1d301b413c Update installed dependencies for Python 3.12 on ubuntu-latest 2026-02-15 04:53:27 +00:00
github-actions[bot]
7083042210 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-02-15 04:39:19 +00:00
github-actions[bot]
4ac0347e5a Update installed dependencies for Python 3.12 on windows-latest 2026-02-15 04:34:07 +00:00
github-actions[bot]
579f795f33 Update installed dependencies for Python 3.13 on windows-latest 2026-02-15 04:27:34 +00:00
github-actions[bot]
897eae6616 Update installed dependencies for Python 3.10 on windows-latest 2026-02-15 04:23:33 +00:00
github-actions[bot]
6f573938bf Update installed dependencies for Python 3.10 on ubuntu-latest 2026-02-15 04:07:39 +00:00
github-actions[bot]
cc903f7807 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-02-13 04:53:48 +00:00
github-actions[bot]
e88a464fab Update installed dependencies for Python 3.12 on ubuntu-latest 2026-02-13 04:51:33 +00:00
github-actions[bot]
7ddd7f0a58 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-02-13 04:38:36 +00:00
github-actions[bot]
9ecb642685 Update installed dependencies for Python 3.13 on windows-latest 2026-02-13 04:34:26 +00:00
github-actions[bot]
ce6ff2e0b5 Update installed dependencies for Python 3.12 on windows-latest 2026-02-13 04:31:15 +00:00
github-actions[bot]
b0886939ab Update installed dependencies for Python 3.10 on windows-latest 2026-02-13 04:28:42 +00:00
github-actions[bot]
ca0eb7b75b Update installed dependencies for Python 3.11 on windows-latest 2026-02-13 04:23:18 +00:00
github-actions[bot]
06111054a4 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-02-13 04:06:03 +00:00
github-actions[bot]
3b69d7a69c Update installed dependencies for Python 3.12 on ubuntu-latest 2026-02-12 06:25:13 +00:00
github-actions[bot]
ec25d5bce7 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-02-12 06:21:16 +00:00
github-actions[bot]
3eb01a5778 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-02-12 06:15:00 +00:00
github-actions[bot]
41016d6087 Update installed dependencies for Python 3.13 on windows-latest 2026-02-12 06:02:27 +00:00
github-actions[bot]
7fea33db97 Update installed dependencies for Python 3.12 on windows-latest 2026-02-12 06:02:06 +00:00
github-actions[bot]
02f8ca32de Update installed dependencies for Python 3.11 on windows-latest 2026-02-12 05:57:58 +00:00
github-actions[bot]
2c0f95df98 Update installed dependencies for Python 3.10 on windows-latest 2026-02-12 05:56:01 +00:00
github-actions[bot]
61742144cb Update installed dependencies for Python 3.10 on ubuntu-latest 2026-02-12 05:39:38 +00:00
github-actions[bot]
0b4d76f509 Update installed dependencies for Python 3.11 on windows-latest 2026-02-11 04:28:43 +00:00
github-actions[bot]
a2ba14114c Update installed dependencies for Python 3.11 on windows-latest 2026-02-09 04:44:23 +00:00
github-actions[bot]
c014969616 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-02-07 04:41:24 +00:00
github-actions[bot]
f0e2438cc1 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-02-07 04:40:59 +00:00
github-actions[bot]
bf53bd4a07 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-02-07 04:27:58 +00:00
github-actions[bot]
e0f24f0ba3 Update installed dependencies for Python 3.12 on windows-latest 2026-02-07 04:19:56 +00:00
github-actions[bot]
056ac5e4c6 Update installed dependencies for Python 3.11 on windows-latest 2026-02-07 04:17:57 +00:00
github-actions[bot]
85dedc331f Update installed dependencies for Python 3.13 on windows-latest 2026-02-07 04:16:35 +00:00
github-actions[bot]
2ba1c2da01 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-02-07 03:59:13 +00:00
github-actions[bot]
5efcccc953 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-02-05 04:48:31 +00:00
github-actions[bot]
309f4a8a4e Update installed dependencies for Python 3.12 on ubuntu-latest 2026-02-05 04:44:48 +00:00
github-actions[bot]
e6380356df Update installed dependencies for Python 3.11 on ubuntu-latest 2026-02-05 04:28:33 +00:00
github-actions[bot]
bef0c16f22 Update installed dependencies for Python 3.10 on windows-latest 2026-02-05 04:28:08 +00:00
github-actions[bot]
fd7b9a07ab Update installed dependencies for Python 3.13 on windows-latest 2026-02-05 04:27:13 +00:00
github-actions[bot]
41257f04bd Update installed dependencies for Python 3.11 on windows-latest 2026-02-05 04:25:01 +00:00
github-actions[bot]
16a1406b7c Update installed dependencies for Python 3.10 on ubuntu-latest 2026-02-05 04:03:49 +00:00
github-actions[bot]
fd5cdbe59a Update installed dependencies for Python 3.13 on ubuntu-latest 2026-02-03 04:50:52 +00:00
github-actions[bot]
15527dc220 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-02-03 04:49:26 +00:00
github-actions[bot]
9ddd6a9df5 Update installed dependencies for Python 3.13 on windows-latest 2026-02-03 04:35:38 +00:00
github-actions[bot]
76f6a13d2a Update installed dependencies for Python 3.11 on ubuntu-latest 2026-02-03 04:28:54 +00:00
github-actions[bot]
53baaded6d Update installed dependencies for Python 3.11 on windows-latest 2026-02-03 04:28:12 +00:00
github-actions[bot]
a883ec704b Update installed dependencies for Python 3.12 on windows-latest 2026-02-03 04:26:12 +00:00
github-actions[bot]
5c7a880deb Update installed dependencies for Python 3.10 on windows-latest 2026-02-03 04:24:53 +00:00
github-actions[bot]
12575f0e0d Update installed dependencies for Python 3.10 on ubuntu-latest 2026-02-03 04:01:37 +00:00
github-actions[bot]
0743babb84 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-02-01 06:37:56 +00:00
github-actions[bot]
d1d446374c Update installed dependencies for Python 3.13 on ubuntu-latest 2026-02-01 04:51:28 +00:00
github-actions[bot]
ac12bbae9c Update installed dependencies for Python 3.12 on ubuntu-latest 2026-02-01 04:47:01 +00:00
github-actions[bot]
d58f6395c6 Update installed dependencies for Python 3.10 on windows-latest 2026-02-01 04:34:01 +00:00
github-actions[bot]
4054d34e0e Update installed dependencies for Python 3.13 on windows-latest 2026-02-01 04:32:24 +00:00
github-actions[bot]
bf51f91ff5 Update installed dependencies for Python 3.12 on windows-latest 2026-02-01 04:30:11 +00:00
github-actions[bot]
ef07860f3c Update installed dependencies for Python 3.11 on windows-latest 2026-02-01 04:27:43 +00:00
github-actions[bot]
eb3890fd4b Update installed dependencies for Python 3.10 on ubuntu-latest 2026-02-01 04:08:04 +00:00
github-actions[bot]
6010067763 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-31 04:45:21 +00:00
github-actions[bot]
db0c266dc1 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-31 04:42:43 +00:00
github-actions[bot]
283c2d1a7b Update installed dependencies for Python 3.12 on windows-latest 2026-01-31 04:24:22 +00:00
github-actions[bot]
3f35fb2470 Update installed dependencies for Python 3.13 on windows-latest 2026-01-31 04:23:28 +00:00
github-actions[bot]
5cd3a99b5f Update installed dependencies for Python 3.10 on windows-latest 2026-01-31 04:23:02 +00:00
github-actions[bot]
5ec8e36f77 Update installed dependencies for Python 3.11 on windows-latest 2026-01-31 04:14:34 +00:00
github-actions[bot]
a46f9d5d78 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-31 03:57:16 +00:00
github-actions[bot]
ae86fe076e Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-29 06:57:10 +00:00
github-actions[bot]
bed2448078 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-29 04:41:09 +00:00
github-actions[bot]
233e214b61 Update installed dependencies for Python 3.12 on windows-latest 2026-01-29 04:33:46 +00:00
github-actions[bot]
172a547268 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-29 04:30:38 +00:00
github-actions[bot]
47af42c6a4 Update installed dependencies for Python 3.13 on windows-latest 2026-01-29 04:23:54 +00:00
github-actions[bot]
bfce7ae400 Update installed dependencies for Python 3.10 on windows-latest 2026-01-29 04:19:14 +00:00
github-actions[bot]
ff1eb9ef38 Update installed dependencies for Python 3.11 on windows-latest 2026-01-29 04:18:42 +00:00
github-actions[bot]
73029aa906 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-29 04:00:03 +00:00
github-actions[bot]
d06b36af28 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-28 02:43:17 +00:00
github-actions[bot]
b5cd00ad10 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-28 02:42:31 +00:00
github-actions[bot]
a195f342fa Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-28 02:32:11 +00:00
github-actions[bot]
86212cd8ed Update installed dependencies for Python 3.13 on windows-latest 2026-01-28 02:23:42 +00:00
github-actions[bot]
6f0059e257 Update installed dependencies for Python 3.10 on windows-latest 2026-01-28 02:21:42 +00:00
github-actions[bot]
3e7d48623b Update installed dependencies for Python 3.12 on windows-latest 2026-01-28 02:19:15 +00:00
github-actions[bot]
722e6dc83e Update installed dependencies for Python 3.11 on windows-latest 2026-01-28 02:18:12 +00:00
github-actions[bot]
0f6b1d032d Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-28 02:02:50 +00:00
github-actions[bot]
ba930078fe Update installed dependencies for Python 3.11 on windows-latest 2026-01-27 04:05:57 +00:00
github-actions[bot]
f12614e759 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-25 04:40:29 +00:00
github-actions[bot]
e96d9dc5c6 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-25 04:38:34 +00:00
github-actions[bot]
4368cf4527 Update installed dependencies for Python 3.13 on windows-latest 2026-01-25 04:20:46 +00:00
github-actions[bot]
b3096123ae Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-25 04:17:39 +00:00
github-actions[bot]
d214bbecf8 Update installed dependencies for Python 3.10 on windows-latest 2026-01-25 04:15:41 +00:00
github-actions[bot]
394152fc2f Update installed dependencies for Python 3.11 on windows-latest 2026-01-25 04:14:08 +00:00
github-actions[bot]
19e033e15d Update installed dependencies for Python 3.12 on windows-latest 2026-01-25 04:10:43 +00:00
github-actions[bot]
80a6e7a7ec Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-25 03:51:51 +00:00
github-actions[bot]
65b380c1a1 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-23 05:55:21 +00:00
github-actions[bot]
398860d998 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-23 05:50:39 +00:00
github-actions[bot]
8b1f4c631c Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-23 05:40:27 +00:00
github-actions[bot]
4a3c6700dc Update installed dependencies for Python 3.13 on windows-latest 2026-01-23 05:33:47 +00:00
github-actions[bot]
c3a2edce67 Update installed dependencies for Python 3.12 on windows-latest 2026-01-23 05:30:42 +00:00
github-actions[bot]
5f50f10cb2 Update installed dependencies for Python 3.10 on windows-latest 2026-01-23 05:29:14 +00:00
github-actions[bot]
ca799e8ca2 Update installed dependencies for Python 3.11 on windows-latest 2026-01-23 05:28:04 +00:00
github-actions[bot]
961a286216 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-23 05:08:13 +00:00
github-actions[bot]
5e52a7a682 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-23 04:08:12 +00:00
github-actions[bot]
8f68be42f8 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-23 04:04:41 +00:00
github-actions[bot]
de61437024 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-23 03:48:34 +00:00
github-actions[bot]
60232bfb73 Update installed dependencies for Python 3.13 on windows-latest 2026-01-23 03:43:06 +00:00
github-actions[bot]
5432439b7f Update installed dependencies for Python 3.11 on windows-latest 2026-01-23 03:39:11 +00:00
github-actions[bot]
4491ee01c2 Update installed dependencies for Python 3.12 on windows-latest 2026-01-23 03:38:38 +00:00
github-actions[bot]
381d5681c2 Update installed dependencies for Python 3.10 on windows-latest 2026-01-23 03:38:10 +00:00
github-actions[bot]
34009fc497 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-23 03:21:51 +00:00
github-actions[bot]
29b5f75c6a Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-22 04:50:59 +00:00
github-actions[bot]
0924c7854a Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-22 04:37:57 +00:00
github-actions[bot]
b4dd35a25a Update installed dependencies for Python 3.12 on windows-latest 2026-01-22 04:31:09 +00:00
github-actions[bot]
9f4940fc35 Update installed dependencies for Python 3.10 on windows-latest 2026-01-22 04:28:53 +00:00
github-actions[bot]
c0f933e2eb Update installed dependencies for Python 3.11 on windows-latest 2026-01-22 04:22:15 +00:00
github-actions[bot]
a5ec782122 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-22 04:08:05 +00:00
github-actions[bot]
abd19c037b Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 15:05:07 +00:00
github-actions[bot]
5ee41879d6 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 15:00:42 +00:00
github-actions[bot]
0141e1d661 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 14:58:51 +00:00
github-actions[bot]
75d962ad0d Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 14:55:05 +00:00
github-actions[bot]
155a4cef92 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 14:50:17 +00:00
github-actions[bot]
f7ed953755 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 14:47:55 +00:00
github-actions[bot]
2b5c5f8de0 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 14:44:32 +00:00
github-actions[bot]
47bc5b2923 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 14:42:13 +00:00
github-actions[bot]
3b27af285f Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 14:41:10 +00:00
github-actions[bot]
344b22a7dd Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 14:39:27 +00:00
github-actions[bot]
6d22922dc3 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 14:37:18 +00:00
github-actions[bot]
ff0c8b8073 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 14:37:02 +00:00
github-actions[bot]
6234d4608b Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 14:33:46 +00:00
github-actions[bot]
ef4ecfb881 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 14:29:41 +00:00
github-actions[bot]
b7b007f8e5 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 14:16:40 +00:00
github-actions[bot]
f4d0b5d48f Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 14:12:58 +00:00
github-actions[bot]
e257232561 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 09:40:49 +00:00
github-actions[bot]
de8da18494 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 09:38:50 +00:00
github-actions[bot]
d0b2df514c Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 09:27:56 +00:00
github-actions[bot]
8af15bb80f Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 09:23:41 +00:00
github-actions[bot]
d15b697efa Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 09:23:03 +00:00
github-actions[bot]
c6715e8c4c Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 09:18:22 +00:00
github-actions[bot]
1693d15456 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 09:17:45 +00:00
github-actions[bot]
0e8ed63b01 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 08:55:51 +00:00
github-actions[bot]
7256fddd37 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 08:54:10 +00:00
github-actions[bot]
a7b4323d03 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 08:51:40 +00:00
github-actions[bot]
ad31a4682e Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 08:50:08 +00:00
github-actions[bot]
8f956c596b Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 08:42:04 +00:00
github-actions[bot]
de8d34c5f3 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 08:36:12 +00:00
github-actions[bot]
93c64a9cda Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 08:32:59 +00:00
github-actions[bot]
92a94d2073 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 08:30:17 +00:00
github-actions[bot]
28fae32e80 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 08:29:40 +00:00
github-actions[bot]
9084585a9c Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 08:26:50 +00:00
github-actions[bot]
16cadb12b1 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 08:25:09 +00:00
github-actions[bot]
a989e60a85 Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 08:24:06 +00:00
github-actions[bot]
dd7b107fea Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 08:22:58 +00:00
github-actions[bot]
84d8ffb6c1 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 08:20:43 +00:00
github-actions[bot]
c63f882780 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 08:20:22 +00:00
github-actions[bot]
453c7c714c Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 08:17:57 +00:00
github-actions[bot]
8e56e1f9a2 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 08:09:59 +00:00
github-actions[bot]
6864897fc6 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 08:05:36 +00:00
github-actions[bot]
4c9a570fc9 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 08:02:05 +00:00
github-actions[bot]
6d928c22fa Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 08:00:27 +00:00
github-actions[bot]
d952bc18e7 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 07:59:28 +00:00
github-actions[bot]
926624eb17 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 07:58:28 +00:00
github-actions[bot]
0bb9d32967 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 07:58:10 +00:00
github-actions[bot]
3e6aa17191 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 07:56:57 +00:00
github-actions[bot]
4ebd16b210 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 07:55:07 +00:00
github-actions[bot]
90eff402de Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 07:43:07 +00:00
github-actions[bot]
b862c18fd7 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 07:41:27 +00:00
github-actions[bot]
46c1bfc1ca Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 07:40:54 +00:00
github-actions[bot]
bae394dd0d Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 07:38:27 +00:00
github-actions[bot]
b6484f51e6 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 07:37:52 +00:00
github-actions[bot]
140cc3f8fb Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 07:36:08 +00:00
github-actions[bot]
dc57da5612 Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 07:35:25 +00:00
github-actions[bot]
ea23be1a88 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 07:33:55 +00:00
github-actions[bot]
4af704e589 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 07:33:37 +00:00
github-actions[bot]
48c582c1c6 Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 07:30:49 +00:00
github-actions[bot]
7328e8b50e Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 07:15:05 +00:00
github-actions[bot]
43bbf77039 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 07:14:33 +00:00
github-actions[bot]
bda71a0b1d Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 06:51:32 +00:00
github-actions[bot]
be43f4da1b Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 06:50:32 +00:00
github-actions[bot]
b76f03cad0 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 06:37:02 +00:00
github-actions[bot]
1dff814f4c Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 06:36:24 +00:00
github-actions[bot]
f2a8f0a8c7 Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 06:29:40 +00:00
github-actions[bot]
10fad748a8 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 06:28:42 +00:00
github-actions[bot]
8d529fdf22 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 06:26:55 +00:00
github-actions[bot]
1a391c9ed9 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 06:14:26 +00:00
github-actions[bot]
f9565384f8 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 06:13:40 +00:00
github-actions[bot]
aff36c6693 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 06:06:15 +00:00
github-actions[bot]
4423cd9054 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 05:59:37 +00:00
github-actions[bot]
c4d441373f Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 05:57:38 +00:00
github-actions[bot]
5243fd2b62 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 05:56:35 +00:00
github-actions[bot]
55babfe19a Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 05:49:02 +00:00
github-actions[bot]
89b5aa8a45 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 05:45:02 +00:00
github-actions[bot]
ef914fee26 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 05:42:33 +00:00
github-actions[bot]
037efe7d33 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 05:36:45 +00:00
github-actions[bot]
86227fe08f Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 05:32:05 +00:00
github-actions[bot]
1641712fde Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 05:28:27 +00:00
github-actions[bot]
1f0d3ce2bc Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 05:25:54 +00:00
github-actions[bot]
c26aeb7171 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 05:24:12 +00:00
github-actions[bot]
0d5af3bdd1 Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 05:20:53 +00:00
github-actions[bot]
519b12db3b Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 05:17:40 +00:00
github-actions[bot]
c54660fe3c Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 05:13:05 +00:00
github-actions[bot]
ba6671aa94 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 05:11:17 +00:00
github-actions[bot]
e0cb926767 Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 05:07:51 +00:00
github-actions[bot]
5fd6d9e179 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 05:05:07 +00:00
github-actions[bot]
f827161ac5 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 05:04:12 +00:00
github-actions[bot]
63d96ca081 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 04:59:19 +00:00
github-actions[bot]
7ecd6c54aa Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 04:48:36 +00:00
github-actions[bot]
b59486e842 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 04:29:06 +00:00
github-actions[bot]
1e78ccfcd9 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 04:27:50 +00:00
github-actions[bot]
d711d7857f Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 04:16:22 +00:00
github-actions[bot]
d65683a2dc Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 04:05:41 +00:00
github-actions[bot]
4a1621549a Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 04:03:43 +00:00
github-actions[bot]
e831146e11 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 04:01:48 +00:00
github-actions[bot]
ced77366f2 Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 03:59:43 +00:00
github-actions[bot]
a2c1301360 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 03:48:45 +00:00
github-actions[bot]
1bbc1ea3db Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 03:45:00 +00:00
github-actions[bot]
2b29bccbab Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 03:43:05 +00:00
github-actions[bot]
db4262934e Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 03:31:14 +00:00
github-actions[bot]
248b217b2e Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 03:25:21 +00:00
github-actions[bot]
8ac10c1947 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 03:23:24 +00:00
github-actions[bot]
b39cad6e78 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 03:22:16 +00:00
github-actions[bot]
fc9e29c1ed Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 03:18:56 +00:00
github-actions[bot]
1a39bfc0fd Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 03:17:20 +00:00
github-actions[bot]
3200e542b3 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 03:03:52 +00:00
github-actions[bot]
9988645b02 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 02:47:30 +00:00
github-actions[bot]
ced1d99e55 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 02:44:31 +00:00
github-actions[bot]
ce1fdccb77 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-21 02:41:56 +00:00
github-actions[bot]
5155b242c5 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-21 02:40:13 +00:00
github-actions[bot]
c1f3b3b0ba Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-21 02:26:37 +00:00
github-actions[bot]
30a58c79d5 Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 02:26:11 +00:00
github-actions[bot]
70940eaf4d Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 02:25:43 +00:00
github-actions[bot]
a1864f8dc0 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 02:24:50 +00:00
github-actions[bot]
028de3fa00 Update installed dependencies for Python 3.12 on windows-latest 2026-01-21 02:24:15 +00:00
github-actions[bot]
4db2bfb496 Update installed dependencies for Python 3.10 on windows-latest 2026-01-21 02:21:50 +00:00
github-actions[bot]
ca0723ce02 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 02:19:36 +00:00
github-actions[bot]
cc2180fb8a Update installed dependencies for Python 3.13 on windows-latest 2026-01-21 02:18:56 +00:00
github-actions[bot]
936ffa3634 Update installed dependencies for Python 3.11 on windows-latest 2026-01-21 02:16:54 +00:00
github-actions[bot]
6c6b191454 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 02:02:56 +00:00
github-actions[bot]
4dac580b41 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-21 01:57:28 +00:00
github-actions[bot]
7b51a54291 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 17:27:27 +00:00
github-actions[bot]
9b8183ebec Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 17:22:55 +00:00
github-actions[bot]
69a6f614d5 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 17:19:02 +00:00
github-actions[bot]
5eee4f1ea6 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 17:18:42 +00:00
github-actions[bot]
13783f107d Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 17:15:41 +00:00
github-actions[bot]
1b3cee0c1f Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 17:06:35 +00:00
github-actions[bot]
9f7b444c8f Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 17:05:24 +00:00
github-actions[bot]
cc6ccc9e6f Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 17:02:34 +00:00
github-actions[bot]
cc741e6599 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 17:01:30 +00:00
github-actions[bot]
c5bb506edf Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 16:57:58 +00:00
github-actions[bot]
c195b28b8d Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 16:53:31 +00:00
github-actions[bot]
a160d24729 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 16:53:06 +00:00
github-actions[bot]
2a323881b2 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 16:52:17 +00:00
github-actions[bot]
31aeef5f06 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 16:50:37 +00:00
github-actions[bot]
05adedbb3b Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 16:45:50 +00:00
github-actions[bot]
e08ba40133 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 16:38:46 +00:00
github-actions[bot]
749ae1c738 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 16:37:21 +00:00
github-actions[bot]
8a94585710 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 16:35:48 +00:00
github-actions[bot]
1fcf89efaa Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 16:34:40 +00:00
github-actions[bot]
c21116d27f Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 16:31:53 +00:00
github-actions[bot]
3c7e211a03 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 16:31:18 +00:00
github-actions[bot]
8e45c29302 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 16:29:17 +00:00
github-actions[bot]
c85047784e Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 16:24:14 +00:00
github-actions[bot]
03156f0137 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 16:23:33 +00:00
github-actions[bot]
8739350051 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 16:21:23 +00:00
github-actions[bot]
1427198b51 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 16:18:57 +00:00
github-actions[bot]
e50c84ce72 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 16:17:43 +00:00
github-actions[bot]
f9da8e07cb Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 16:16:15 +00:00
github-actions[bot]
0850cfb0a9 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 16:14:23 +00:00
github-actions[bot]
da314a8733 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 16:11:24 +00:00
github-actions[bot]
d4c6a03305 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 16:08:35 +00:00
github-actions[bot]
c15ed5fed9 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 16:08:07 +00:00
github-actions[bot]
bfb62186be Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 16:06:57 +00:00
github-actions[bot]
7d7de8b1d7 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 16:04:18 +00:00
github-actions[bot]
8bd09ccb51 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 16:01:07 +00:00
github-actions[bot]
4138db8c9d Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 16:00:38 +00:00
github-actions[bot]
f6c4f6e9eb Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 15:57:15 +00:00
github-actions[bot]
23c5b53481 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 15:55:59 +00:00
github-actions[bot]
873cbea4f4 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 15:52:51 +00:00
github-actions[bot]
42a1477022 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 15:51:58 +00:00
github-actions[bot]
32bec11396 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 15:46:34 +00:00
github-actions[bot]
b9184dc01f Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 15:45:30 +00:00
github-actions[bot]
54c74bed73 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 15:43:35 +00:00
github-actions[bot]
9bc3c51ac4 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 15:42:29 +00:00
github-actions[bot]
fd895b3796 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 15:40:13 +00:00
github-actions[bot]
467665fabf Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 15:33:37 +00:00
github-actions[bot]
f7a97581cb Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 15:29:15 +00:00
github-actions[bot]
2e09687c8f Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 15:26:49 +00:00
github-actions[bot]
264074b984 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 15:25:06 +00:00
github-actions[bot]
9cffeaf068 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 15:22:02 +00:00
github-actions[bot]
dba2975355 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 15:17:05 +00:00
github-actions[bot]
93a10e90c9 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 15:00:50 +00:00
github-actions[bot]
cbfc7c7d6f Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 14:27:31 +00:00
github-actions[bot]
b7406d8b65 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 14:26:43 +00:00
github-actions[bot]
33984639f8 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 14:10:11 +00:00
github-actions[bot]
a47b9a3f44 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 14:08:49 +00:00
github-actions[bot]
07a63acf38 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 14:01:51 +00:00
github-actions[bot]
b4859d8870 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 14:00:15 +00:00
github-actions[bot]
55a2a30f3d Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 13:58:36 +00:00
github-actions[bot]
278bf5b548 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 13:43:13 +00:00
github-actions[bot]
cc5ea0c168 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 13:36:29 +00:00
github-actions[bot]
b6ce840333 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 13:34:31 +00:00
github-actions[bot]
97b769ff62 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 13:19:25 +00:00
github-actions[bot]
9cf2d0af06 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 13:17:06 +00:00
github-actions[bot]
099273591c Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 13:15:40 +00:00
github-actions[bot]
61f914e898 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 13:06:17 +00:00
github-actions[bot]
b5942269a9 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 13:04:23 +00:00
github-actions[bot]
1d355fe0b5 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 12:49:44 +00:00
github-actions[bot]
fb237ea738 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 12:33:23 +00:00
github-actions[bot]
858bd2323b Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 12:18:43 +00:00
github-actions[bot]
a0774f70fc Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 12:10:28 +00:00
github-actions[bot]
63fab6da65 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 12:09:41 +00:00
github-actions[bot]
99966aca73 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 12:08:26 +00:00
github-actions[bot]
6c28ba6c5d Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 12:04:48 +00:00
github-actions[bot]
d28ef9d5a1 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 11:51:21 +00:00
github-actions[bot]
cd096a1386 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 10:24:47 +00:00
github-actions[bot]
7d9559aeeb Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 10:20:19 +00:00
github-actions[bot]
c2e79b9cdc Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 10:05:43 +00:00
github-actions[bot]
cb7fc18490 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 10:04:13 +00:00
github-actions[bot]
9075598d4e Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 10:02:00 +00:00
github-actions[bot]
0763b6a1c1 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 09:59:46 +00:00
github-actions[bot]
56cb755e9e Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 09:57:18 +00:00
github-actions[bot]
0bc3841b67 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 09:38:34 +00:00
github-actions[bot]
c6904fc1fa Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 09:36:32 +00:00
github-actions[bot]
ad32fac79d Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 09:35:13 +00:00
github-actions[bot]
512641abcf Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 09:20:50 +00:00
github-actions[bot]
66561aed17 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 09:17:24 +00:00
github-actions[bot]
dec289c035 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 09:13:51 +00:00
github-actions[bot]
2248ecf6fe Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 09:11:06 +00:00
github-actions[bot]
81b2a6526a Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 09:09:23 +00:00
github-actions[bot]
56bcea212a Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 08:51:27 +00:00
github-actions[bot]
d584f4dcb1 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 07:15:41 +00:00
github-actions[bot]
a5dc39f06e Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 07:12:53 +00:00
github-actions[bot]
dc327b38f3 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 07:08:02 +00:00
github-actions[bot]
53d546b73e Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 07:00:40 +00:00
github-actions[bot]
cbc8d0c94a Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 06:57:51 +00:00
github-actions[bot]
1ab20b5b7c Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 06:55:13 +00:00
github-actions[bot]
f893dba6a9 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 06:54:45 +00:00
github-actions[bot]
9a6e783199 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 06:49:06 +00:00
github-actions[bot]
a4b9392a69 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 06:48:27 +00:00
github-actions[bot]
b96a25bae3 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 06:45:33 +00:00
github-actions[bot]
dc6f191861 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 06:44:30 +00:00
github-actions[bot]
452ff33d88 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 06:43:43 +00:00
github-actions[bot]
fd5c2a9dfd Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 06:41:17 +00:00
github-actions[bot]
7a1d7e380d Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 06:28:20 +00:00
github-actions[bot]
53b512547e Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 06:27:34 +00:00
github-actions[bot]
0936189dcd Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 06:03:34 +00:00
github-actions[bot]
4b2451e7c9 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 06:01:01 +00:00
github-actions[bot]
3782afa347 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 05:44:46 +00:00
github-actions[bot]
f75beb919d Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 05:40:12 +00:00
github-actions[bot]
f755a7a0da Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 05:37:46 +00:00
github-actions[bot]
5c1060715f Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 05:35:20 +00:00
github-actions[bot]
2c85192386 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 05:16:57 +00:00
github-actions[bot]
1e7a9dc345 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 04:18:56 +00:00
github-actions[bot]
69a66828a3 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 04:16:42 +00:00
github-actions[bot]
773345604a Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 04:15:24 +00:00
github-actions[bot]
6cca7d2d63 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 04:13:03 +00:00
github-actions[bot]
dd846715ec Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 04:11:57 +00:00
github-actions[bot]
3275867baa Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 04:11:31 +00:00
github-actions[bot]
52c68a9445 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 04:02:03 +00:00
github-actions[bot]
19477683c4 Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 04:00:24 +00:00
github-actions[bot]
8731dddc75 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 03:56:52 +00:00
github-actions[bot]
45787b4979 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 03:54:52 +00:00
github-actions[bot]
fb6c90460b Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 03:53:43 +00:00
github-actions[bot]
442024d41f Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 03:53:06 +00:00
github-actions[bot]
da958f329a Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 03:52:17 +00:00
github-actions[bot]
bab9aed3aa Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 03:51:27 +00:00
github-actions[bot]
7e1f12a4c8 Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 03:48:43 +00:00
github-actions[bot]
02f1919006 Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 03:46:26 +00:00
github-actions[bot]
6976ce0c8e Update installed dependencies for Python 3.10 on windows-latest 2026-01-20 03:45:46 +00:00
github-actions[bot]
eb679157d1 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 03:32:42 +00:00
github-actions[bot]
181dd2dad8 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 03:31:26 +00:00
github-actions[bot]
183e661079 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 03:29:37 +00:00
github-actions[bot]
d1868a8bdb Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-20 01:30:27 +00:00
github-actions[bot]
1526f67080 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-20 01:28:17 +00:00
github-actions[bot]
1088e3c6a0 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-20 01:11:01 +00:00
github-actions[bot]
df209585bb Update installed dependencies for Python 3.11 on windows-latest 2026-01-20 01:09:47 +00:00
github-actions[bot]
d949298287 Update installed dependencies for Python 3.13 on windows-latest 2026-01-20 01:05:40 +00:00
github-actions[bot]
7d5aa555eb Update installed dependencies for Python 3.12 on windows-latest 2026-01-20 01:02:36 +00:00
github-actions[bot]
6faa8b479b Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-20 00:40:47 +00:00
github-actions[bot]
628f33636f Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 18:31:49 +00:00
github-actions[bot]
c9726fb7ab Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 18:27:59 +00:00
github-actions[bot]
838cee5eb4 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 18:27:33 +00:00
github-actions[bot]
ab6a0ff105 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 18:23:26 +00:00
github-actions[bot]
2604961809 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 18:22:16 +00:00
github-actions[bot]
a3ec02fab0 Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 18:16:50 +00:00
github-actions[bot]
a6d3878995 Update installed dependencies for Python 3.13 on windows-latest 2026-01-19 18:15:44 +00:00
github-actions[bot]
69672f624b Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 18:10:37 +00:00
github-actions[bot]
763e4a3ae1 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 18:09:10 +00:00
github-actions[bot]
cef80a0212 Update installed dependencies for Python 3.13 on windows-latest 2026-01-19 18:05:10 +00:00
github-actions[bot]
fac55074ef Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 18:04:35 +00:00
github-actions[bot]
6e8ede1d15 Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 18:01:46 +00:00
github-actions[bot]
d77e98cb36 Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 18:01:16 +00:00
github-actions[bot]
4514caffb4 Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 17:59:49 +00:00
github-actions[bot]
f2f9c3b5b5 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 17:59:03 +00:00
github-actions[bot]
b314f18471 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 17:56:26 +00:00
github-actions[bot]
ee49ad7e57 Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 17:55:54 +00:00
github-actions[bot]
c4755e167b Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 17:50:48 +00:00
github-actions[bot]
7760d0f2c3 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 17:45:13 +00:00
github-actions[bot]
2e6a150709 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 17:43:46 +00:00
github-actions[bot]
c685dd1bf9 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 17:39:36 +00:00
github-actions[bot]
f751f09e28 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 17:38:50 +00:00
github-actions[bot]
13e3d4da2b Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 17:32:07 +00:00
github-actions[bot]
b3a1262068 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 17:30:49 +00:00
github-actions[bot]
131aba9158 Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 17:25:27 +00:00
github-actions[bot]
5ed43cbdf9 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 17:23:04 +00:00
github-actions[bot]
a41d0b1752 Update installed dependencies for Python 3.13 on windows-latest 2026-01-19 17:22:11 +00:00
github-actions[bot]
125d735968 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 17:21:14 +00:00
github-actions[bot]
551bdaf164 Update installed dependencies for Python 3.13 on windows-latest 2026-01-19 17:19:49 +00:00
github-actions[bot]
bc1085817e Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 17:19:27 +00:00
github-actions[bot]
28067fdf5f Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 17:18:45 +00:00
github-actions[bot]
8d125f7755 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 17:01:16 +00:00
github-actions[bot]
c7f3197d38 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 16:59:47 +00:00
github-actions[bot]
85bcb2ebbc Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 15:52:03 +00:00
github-actions[bot]
259a0fa252 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 14:57:57 +00:00
github-actions[bot]
96349ba58c Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 14:51:15 +00:00
github-actions[bot]
e10e4e6d24 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 14:42:41 +00:00
github-actions[bot]
d91a7654da Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 14:33:25 +00:00
github-actions[bot]
bfdfefa728 Update installed dependencies for Python 3.13 on windows-latest 2026-01-19 14:30:37 +00:00
github-actions[bot]
f6b43f0837 Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 14:29:47 +00:00
github-actions[bot]
05c7767006 Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 14:28:33 +00:00
github-actions[bot]
0ca166fce3 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 14:28:01 +00:00
github-actions[bot]
2b6e59d93b Update installed dependencies for Python 3.13 on windows-latest 2026-01-19 14:26:22 +00:00
github-actions[bot]
e2948f577a Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 14:24:23 +00:00
github-actions[bot]
15049d6b23 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 14:11:36 +00:00
github-actions[bot]
62831146a9 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 14:06:01 +00:00
github-actions[bot]
598bcdd1b3 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 12:15:08 +00:00
github-actions[bot]
56570f8d8c Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 12:11:09 +00:00
github-actions[bot]
5118e66e80 Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 12:08:55 +00:00
github-actions[bot]
094f0102c1 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 11:57:27 +00:00
github-actions[bot]
cf197b3766 Update installed dependencies for Python 3.13 on windows-latest 2026-01-19 11:56:16 +00:00
github-actions[bot]
c75bd5c203 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 11:52:25 +00:00
github-actions[bot]
6af1ea046e Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 11:50:33 +00:00
github-actions[bot]
96bcc314a1 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 11:30:59 +00:00
github-actions[bot]
2d0d238806 Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 10:15:53 +00:00
github-actions[bot]
6114aa8c03 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 10:02:15 +00:00
github-actions[bot]
f30ae2200a Update installed dependencies for Python 3.13 on windows-latest 2026-01-19 09:47:58 +00:00
github-actions[bot]
2d393e6db0 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 09:47:03 +00:00
github-actions[bot]
477e74ca3c Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 09:44:45 +00:00
github-actions[bot]
769e7c752c Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 09:43:20 +00:00
github-actions[bot]
0b852845b9 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 09:21:19 +00:00
github-actions[bot]
d47f9a2bee Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 09:06:54 +00:00
github-actions[bot]
702880b8cf Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 08:48:28 +00:00
github-actions[bot]
662e7be4b0 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 08:45:05 +00:00
github-actions[bot]
b1c98effd5 Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 08:43:13 +00:00
github-actions[bot]
3a3e7390db Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 08:37:46 +00:00
github-actions[bot]
07011efa39 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 08:20:16 +00:00
github-actions[bot]
30de97c8a7 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 07:02:10 +00:00
github-actions[bot]
7e1242f8dd Update installed dependencies for Python 3.13 on ubuntu-latest 2026-01-19 06:55:01 +00:00
github-actions[bot]
829e565323 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 06:44:13 +00:00
github-actions[bot]
8a24a25190 Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 06:41:26 +00:00
github-actions[bot]
b0e5225392 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 06:36:45 +00:00
github-actions[bot]
4dcac0424d Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 06:33:43 +00:00
github-actions[bot]
43d3e2af31 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 06:31:36 +00:00
github-actions[bot]
023b674622 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-19 06:21:41 +00:00
github-actions[bot]
aa10362f76 Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 06:18:29 +00:00
github-actions[bot]
4f24e1f87d Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 06:17:25 +00:00
github-actions[bot]
5acf1fd0d4 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 06:16:17 +00:00
github-actions[bot]
943fddb700 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 06:10:08 +00:00
github-actions[bot]
43ed027d3b Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 05:53:10 +00:00
github-actions[bot]
3c04b4b906 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-19 04:33:13 +00:00
github-actions[bot]
fc95d14d96 Update installed dependencies for Python 3.12 on windows-latest 2026-01-19 04:15:03 +00:00
github-actions[bot]
eac8b2b415 Update installed dependencies for Python 3.11 on windows-latest 2026-01-19 04:13:57 +00:00
github-actions[bot]
b73c4c62ea Update installed dependencies for Python 3.10 on windows-latest 2026-01-19 04:08:31 +00:00
github-actions[bot]
7ef6253a2e Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-19 03:49:11 +00:00
github-actions[bot]
1d220749b7 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-17 15:24:33 +00:00
github-actions[bot]
4b14559a9b Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-17 15:08:06 +00:00
github-actions[bot]
25b5d43df8 Update installed dependencies for Python 3.12 on windows-latest 2026-01-17 15:03:20 +00:00
github-actions[bot]
a7f84f29cf Update installed dependencies for Python 3.10 on windows-latest 2026-01-17 14:59:41 +00:00
github-actions[bot]
71d9a6ffe6 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-17 14:37:44 +00:00
github-actions[bot]
40b3ce4906 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-17 04:33:33 +00:00
github-actions[bot]
7aa442d7cd Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-17 04:04:24 +00:00
github-actions[bot]
5d0a6c558f Update installed dependencies for Python 3.12 on windows-latest 2026-01-17 04:03:02 +00:00
github-actions[bot]
1784d14c93 Update installed dependencies for Python 3.10 on windows-latest 2026-01-17 04:01:17 +00:00
github-actions[bot]
d7773e7db3 Update installed dependencies for Python 3.11 on windows-latest 2026-01-17 03:55:49 +00:00
github-actions[bot]
5294f760ac Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-17 03:40:02 +00:00
github-actions[bot]
d40fb36e05 Update installed dependencies for Python 3.11 on windows-latest 2026-01-15 05:09:22 +00:00
github-actions[bot]
7f56524013 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-15 04:29:16 +00:00
github-actions[bot]
a793f4820c Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-15 04:13:02 +00:00
github-actions[bot]
9e6f030b46 Update installed dependencies for Python 3.12 on windows-latest 2026-01-15 04:06:48 +00:00
github-actions[bot]
fd02389fe9 Update installed dependencies for Python 3.10 on windows-latest 2026-01-15 04:00:47 +00:00
github-actions[bot]
e44678d2a4 Update installed dependencies for Python 3.11 on windows-latest 2026-01-15 03:56:49 +00:00
github-actions[bot]
cfa50479bb Update installed dependencies for Python 3.12 on windows-latest 2026-01-13 06:13:53 +00:00
github-actions[bot]
3e92aa724b Update installed dependencies for Python 3.10 on windows-latest 2026-01-13 06:09:56 +00:00
github-actions[bot]
d61a4fd134 Update installed dependencies for Python 3.10 on windows-latest 2026-01-13 06:06:14 +00:00
github-actions[bot]
aa7bc1ffad Update installed dependencies for Python 3.11 on windows-latest 2026-01-13 06:05:35 +00:00
github-actions[bot]
9db0dde352 Update installed dependencies for Python 3.12 on windows-latest 2026-01-13 06:03:59 +00:00
github-actions[bot]
923fd3f8f7 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-13 04:23:54 +00:00
github-actions[bot]
8f18f5b45b Update installed dependencies for Python 3.12 on windows-latest 2026-01-13 04:15:33 +00:00
github-actions[bot]
556f566a5a Update installed dependencies for Python 3.11 on windows-latest 2026-01-13 04:06:21 +00:00
github-actions[bot]
8267782825 Update installed dependencies for Python 3.10 on windows-latest 2026-01-13 03:59:18 +00:00
github-actions[bot]
4205fa813b Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-12 16:46:00 +00:00
github-actions[bot]
6de06dc1f1 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-12 16:32:24 +00:00
github-actions[bot]
15bb158944 Update installed dependencies for Python 3.12 on windows-latest 2026-01-12 16:28:54 +00:00
github-actions[bot]
7a3d7ef207 Update installed dependencies for Python 3.10 on windows-latest 2026-01-12 16:24:42 +00:00
github-actions[bot]
f818b1cc18 Update installed dependencies for Python 3.11 on windows-latest 2026-01-12 16:22:13 +00:00
github-actions[bot]
9d9c6149e0 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-12 16:06:50 +00:00
github-actions[bot]
674cf6f34a Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-12 14:39:41 +00:00
github-actions[bot]
85f1ae4b45 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-12 14:28:27 +00:00
github-actions[bot]
98539a0a72 Update installed dependencies for Python 3.12 on windows-latest 2026-01-12 14:24:34 +00:00
github-actions[bot]
5657200deb Update installed dependencies for Python 3.10 on windows-latest 2026-01-12 14:22:11 +00:00
github-actions[bot]
fc676b7aba Update installed dependencies for Python 3.11 on windows-latest 2026-01-12 14:16:39 +00:00
github-actions[bot]
217bdff0cc Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-12 13:58:41 +00:00
github-actions[bot]
5252cc2896 Update installed dependencies for Python 3.12 on windows-latest 2026-01-12 12:56:36 +00:00
github-actions[bot]
f39a9780cf Update installed dependencies for Python 3.10 on windows-latest 2026-01-12 12:47:24 +00:00
github-actions[bot]
398f11227b Update installed dependencies for Python 3.11 on windows-latest 2026-01-12 12:43:49 +00:00
github-actions[bot]
124e274341 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-12 12:27:02 +00:00
github-actions[bot]
c4b8c337df Update installed dependencies for Python 3.12 on windows-latest 2026-01-12 10:56:13 +00:00
github-actions[bot]
567f4a9399 Update installed dependencies for Python 3.11 on windows-latest 2026-01-12 10:54:40 +00:00
github-actions[bot]
eeddf004ec Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-12 10:36:03 +00:00
github-actions[bot]
546eb621af Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-11 04:35:24 +00:00
github-actions[bot]
3d8d718c4b Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-11 04:20:30 +00:00
github-actions[bot]
b264f04b11 Update installed dependencies for Python 3.10 on windows-latest 2026-01-11 04:17:26 +00:00
github-actions[bot]
d92fe045f9 Update installed dependencies for Python 3.12 on windows-latest 2026-01-11 04:14:17 +00:00
github-actions[bot]
e05ae5973b Update installed dependencies for Python 3.11 on windows-latest 2026-01-11 04:08:39 +00:00
github-actions[bot]
1e36194053 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-11 03:53:12 +00:00
github-actions[bot]
c781c68cef Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-10 16:43:57 +00:00
github-actions[bot]
e8f40f4e31 Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 16:20:00 +00:00
github-actions[bot]
a8e5aced66 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-10 16:17:03 +00:00
github-actions[bot]
27c10923ac Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 16:08:31 +00:00
github-actions[bot]
d73fba54da Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 16:07:07 +00:00
github-actions[bot]
9ed4797859 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 15:46:30 +00:00
github-actions[bot]
33d0e3f1e3 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-10 13:43:03 +00:00
github-actions[bot]
a266fe8bbf Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-10 13:25:09 +00:00
github-actions[bot]
6167f6c1e7 Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 13:23:53 +00:00
github-actions[bot]
7f5f0017ee Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 13:17:28 +00:00
github-actions[bot]
ad706c9244 Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 13:16:42 +00:00
github-actions[bot]
5456996015 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 12:58:59 +00:00
github-actions[bot]
d20ab683a8 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-10 11:53:48 +00:00
github-actions[bot]
3db891a894 Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 11:32:49 +00:00
github-actions[bot]
a7efb9159f Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-10 11:30:41 +00:00
github-actions[bot]
ad009ce72e Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 11:27:55 +00:00
github-actions[bot]
d0388c3fe1 Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 11:24:01 +00:00
github-actions[bot]
b86c13e1eb Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 11:06:26 +00:00
github-actions[bot]
ba26b0b3e1 Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 10:45:29 +00:00
github-actions[bot]
58fe076256 Update installed dependencies for Python 3.11 on macos-latest 2026-01-10 10:43:27 +00:00
github-actions[bot]
3ab9ce3cda Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-10 10:11:44 +00:00
github-actions[bot]
cec0cc42be Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-10 09:59:07 +00:00
github-actions[bot]
eb780e1328 Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 09:56:44 +00:00
github-actions[bot]
686230587f Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-10 09:51:48 +00:00
github-actions[bot]
ded551a125 Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 09:44:27 +00:00
github-actions[bot]
c02eb147f6 Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 09:43:44 +00:00
github-actions[bot]
2580d7d662 Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 09:38:49 +00:00
github-actions[bot]
dbc0f6d559 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-10 09:34:36 +00:00
github-actions[bot]
28fb389887 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 09:32:57 +00:00
github-actions[bot]
9669552402 Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 09:25:50 +00:00
github-actions[bot]
4f6933e5ef Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 09:23:54 +00:00
github-actions[bot]
5a907b93ed Update installed dependencies for Python 3.11 on macos-latest 2026-01-10 09:14:29 +00:00
github-actions[bot]
1157c2d081 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 09:07:03 +00:00
github-actions[bot]
3d4e827e4f Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 07:51:18 +00:00
github-actions[bot]
2f5bae733b Update installed dependencies for Python 3.11 on macos-latest 2026-01-10 07:36:52 +00:00
github-actions[bot]
8fa6578c3a Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-10 06:35:19 +00:00
github-actions[bot]
7dd998ac4a Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-10 06:24:19 +00:00
github-actions[bot]
42eb9bf43f Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 06:19:41 +00:00
github-actions[bot]
c866753ec8 Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 06:10:27 +00:00
github-actions[bot]
f1c923e2b1 Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 06:09:19 +00:00
github-actions[bot]
8aaae8cacb Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-10 06:01:28 +00:00
github-actions[bot]
d07cbabdfc Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 05:52:58 +00:00
github-actions[bot]
8da9721368 Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 05:48:08 +00:00
github-actions[bot]
9b940a1dca Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-10 05:46:27 +00:00
github-actions[bot]
78d11138a7 Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 05:43:55 +00:00
github-actions[bot]
8d1e7f98a6 Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 05:42:17 +00:00
github-actions[bot]
178b99ac5b Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 05:38:29 +00:00
github-actions[bot]
ac2e215731 Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 05:35:29 +00:00
github-actions[bot]
b86df95d29 Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 05:32:48 +00:00
github-actions[bot]
bb871ebbb7 Update installed dependencies for Python 3.11 on macos-latest 2026-01-10 05:30:03 +00:00
github-actions[bot]
b747e4857d Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 05:19:22 +00:00
github-actions[bot]
75c7365d08 Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 05:15:33 +00:00
github-actions[bot]
a2fc917aba Update installed dependencies for Python 3.11 on macos-latest 2026-01-10 05:07:20 +00:00
github-actions[bot]
d18a67ecdf Update installed dependencies for Python 3.11 on macos-latest 2026-01-10 05:01:28 +00:00
github-actions[bot]
c682df0d04 Update installed dependencies for Python 3.12 on ubuntu-latest 2026-01-10 03:34:25 +00:00
github-actions[bot]
cd9750f67d Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-10 03:18:09 +00:00
github-actions[bot]
3f560e1521 Update installed dependencies for Python 3.10 on windows-latest 2026-01-10 03:15:28 +00:00
github-actions[bot]
71127a1bdb Update installed dependencies for Python 3.12 on windows-latest 2026-01-10 03:13:43 +00:00
github-actions[bot]
9b4626a54c Update installed dependencies for Python 3.11 on windows-latest 2026-01-10 03:05:34 +00:00
github-actions[bot]
8cea895aeb Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-10 02:49:56 +00:00
github-actions[bot]
c630929668 Update installed dependencies for Python 3.11 on macos-latest 2026-01-10 02:38:58 +00:00
github-actions[bot]
0eecc19018 Update installed dependencies for Python 3.12 on windows-latest 2026-01-09 16:18:54 +00:00
github-actions[bot]
d3f45a1f5b Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-09 16:16:55 +00:00
github-actions[bot]
a9bcf465d2 Update installed dependencies for Python 3.11 on windows-latest 2026-01-09 16:12:14 +00:00
github-actions[bot]
7e79f6aeec Update installed dependencies for Python 3.10 on windows-latest 2026-01-09 16:09:26 +00:00
github-actions[bot]
db999b1f8b Update installed dependencies for Python 3.10 on ubuntu-latest 2026-01-09 15:51:01 +00:00
github-actions[bot]
cb815d6eef Update installed dependencies for Python 3.11 on macos-latest 2026-01-09 15:39:08 +00:00
github-actions[bot]
e7277d8092 Update installed dependencies for Python 3.12 on windows-latest 2026-01-09 13:40:36 +00:00
github-actions[bot]
c9a6fb875c Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-09 13:34:38 +00:00
github-actions[bot]
c9a225e615 Update installed dependencies for Python 3.11 on windows-latest 2026-01-09 13:27:02 +00:00
github-actions[bot]
295ad7f7a9 Update installed dependencies for Python 3.10 on windows-latest 2026-01-09 13:24:26 +00:00
github-actions[bot]
fdb00491ab Update installed dependencies for Python 3.11 on macos-latest 2026-01-09 12:50:34 +00:00
github-actions[bot]
83f64ce9a4 Update installed dependencies for Python 3.11 on ubuntu-latest 2026-01-09 11:13:14 +00:00
github-actions[bot]
71e3504db8 Update installed dependencies for Python 3.12 on windows-latest 2026-01-09 11:00:07 +00:00
github-actions[bot]
b558803ece Update installed dependencies for Python 3.11 on windows-latest 2026-01-09 10:57:28 +00:00
github-actions[bot]
880e947216 Update installed dependencies for Python 3.10 on windows-latest 2026-01-09 10:51:14 +00:00
github-actions[bot]
05d433e216 Update installed dependencies for Python 3.11 on macos-latest 2026-01-09 10:21:49 +00:00
Li Jiang
41f51297c6 Init installed dependencies 2026-01-09 09:42:36 +00:00
Li Jiang
1285700d7a Update readme, bump version to 2.4.0, fix CI errors (#1466)
* Update gitignore

* Bump version to 2.4.0

* Update readme

* Pre-download california housing data

* Use pre-downloaded california housing data

* Pin lightning<=2.5.6

* Fix typo in find and replace

* Fix estimators has no attribute __sklearn_tags__

* Pin torch to 2.2.2 in tests

* Fix conflict

* Update pytorch-forecasting

* Update pytorch-forecasting

* Update pytorch-forecasting

* Use numpy<2 for testing

* Update scikit-learn

* Run Build and UT every other day

* Pin pip<24.1

* Pin pip<24.1 in pipeline

* Loosen pip, install pytorch_forecasting only in py311

* Add support to new versions of nlp dependencies

* Fix formats

* Remove redefinition

* Update mlflow versions

* Fix mlflow version syntax

* Update gitignore

* Clean up cache to free space

* Remove clean up action cache

* Fix blendsearch

* Update test workflow

* Update setup.py

* Fix catboost version

* Update workflow

* Prepare for python 3.14

* Support no catboost

* Fix tests

* Fix python_requires

* Update test workflow

* Fix vw tests

* Remove python 3.9

* Fix nlp tests

* Fix prophet

* Print pip freeze for better debugging

* Fix Optuna search does not support parameters of type Float with samplers of type Quantized

* Save dependencies for later inspection

* Fix coverage.xml not exists

* Fix github action permission

* Handle python 3.13

* Address openml is not installed

* Check dependencies before run tests

* Update dependencies

* Fix syntax error

* Use bash

* Update dependencies

* Fix git error

* Loosen mlflow constraints

* Add rerun, use mlflow-skinny

* Fix git error

* Remove ray tests

* Update xgboost versions

* Fix automl pickle error

* Don't test python 3.10 on macos as it's stuck

* Rebase before push

* Reduce number of branches
2026-01-09 13:40:52 +08:00
dependabot[bot]
7f42bece89 Bump algoliasearch-helper from 3.11.1 to 3.26.0 in /website (#1461)
* Bump algoliasearch-helper from 3.11.1 to 3.26.0 in /website

Bumps [algoliasearch-helper](https://github.com/algolia/instantsearch) from 3.11.1 to 3.26.0.
- [Release notes](https://github.com/algolia/instantsearch/releases)
- [Commits](https://github.com/algolia/instantsearch/commits/algoliasearch-helper@3.26.0)

---
updated-dependencies:
- dependency-name: algoliasearch-helper
  dependency-version: 3.26.0
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

* Fix format error

* Fix format error

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Li Jiang <lijiang1@microsoft.com>
2025-10-09 14:37:31 +08:00
Keita Onabuta
e19107407b update loc second args - column (#1458)
Configure the second argument of the loc function to be time_col instead of the dataframe X.
2025-08-30 11:07:19 +08:00
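A minimal pandas illustration of the change described above; the DataFrame and column names below are placeholders rather than FLAML's actual variables. The point is that the second selector of `.loc` should be the time column's label, not the DataFrame `X` itself.

```python
import pandas as pd

# Placeholder data: .loc takes a row selector and a column selector.
X = pd.DataFrame({"ds": pd.date_range("2024-01-01", periods=3), "y": [1.0, 2.0, 3.0]})
time_col = "ds"

timestamps = X.loc[:, time_col]  # correct: selects the time column by label
print(timestamps)
```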
Li Jiang
f5d6693253 Bump version to 2.3.7 (#1457) 2025-08-26 14:59:32 +08:00
Azamatkhan Arifkhanov
d4e43c50a2 Fix OSError: [Errno 24] Too many open files: 'nul' (#1455)
* Update model.py

Added closing of save_fds.

* Updated model.py for pre-commit requirements
2025-08-26 12:50:22 +08:00
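A hedged sketch of the descriptor pattern this fix addresses (illustrative, not the exact `model.py` code): output is silenced by duplicating stdout/stderr to the null device, and every duplicated descriptor, including the saved originals in `save_fds`, must be closed afterwards, otherwise repeated calls exhaust the per-process limit and raise Errno 24.

```python
import os

null_fds = [os.open(os.devnull, os.O_RDWR) for _ in range(2)]
save_fds = [os.dup(fd) for fd in (1, 2)]  # remember the real stdout/stderr
try:
    os.dup2(null_fds[0], 1)  # silence stdout
    os.dup2(null_fds[1], 2)  # silence stderr
    print("this line is swallowed")  # stands in for noisy training output
finally:
    os.dup2(save_fds[0], 1)  # restore stdout
    os.dup2(save_fds[1], 2)  # restore stderr
    for fd in null_fds + save_fds:  # closing save_fds is the part the fix adds
        os.close(fd)
print("output restored")
```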
dependabot[bot]
13aec414ea Bump brace-expansion from 1.1.11 to 1.1.12 in /website (#1453)
Bumps [brace-expansion](https://github.com/juliangruber/brace-expansion) from 1.1.11 to 1.1.12.
- [Release notes](https://github.com/juliangruber/brace-expansion/releases)
- [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12)

---
updated-dependencies:
- dependency-name: brace-expansion
  dependency-version: 1.1.12
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-08-14 10:50:51 +08:00
Li Jiang
bb16dcde93 Bump version to 2.3.6 (#1451) 2025-08-05 14:29:36 +08:00
Li Jiang
be81a76da9 Fix TypeError of customized kfold method which needs 'y' (#1450) 2025-08-02 08:05:50 +08:00
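An illustrative way to hit the code path this fix covers: a custom splitter whose `split()` signature requires `y` (for example `StratifiedKFold`). The dataset is synthetic and the budget/estimator settings are assumptions, not recommendations.

```python
from sklearn.datasets import make_classification
from sklearn.model_selection import StratifiedKFold
from flaml import AutoML

X, y = make_classification(n_samples=200, n_features=10, random_state=0)

# StratifiedKFold.split(X, y) needs the labels, which the fix now forwards.
automl = AutoML()
automl.fit(
    X, y,
    task="classification",
    eval_method="cv",
    split_type=StratifiedKFold(n_splits=5, shuffle=True, random_state=0),
    time_budget=30,
    estimator_list=["lgbm"],
)
```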
Li Jiang
2d16089529 Improve FAQ docs (#1448)
* Fix settings usage error

* Add new code example
2025-07-09 18:33:10 +08:00
Li Jiang
01c3c83653 Install wheel and setuptools (#1443) 2025-05-28 12:56:48 +08:00
Li Jiang
9b66103f7c Fix typo, add quotes to python-version (#1442) 2025-05-28 12:24:00 +08:00
Li Jiang
48dfd72e64 Fix CD actions (#1441)
* Fix CD actions

* Skip Build if no relevant changes
2025-05-28 10:45:27 +08:00
Li Jiang
dec92e5b02 Upgrade python 3.8 to 3.10 in github actions (#1440) 2025-05-27 21:34:21 +08:00
Li Jiang
22911ea1ef Merged PR 1685054: Add more logs and function wait_futures for easier post analysis (#1438)
- Add function wait_futures for easier post analysis
- Use logger instead of print

----
#### AI description  (iteration 1)
#### PR Classification
A code enhancement for debugging asynchronous mlflow logging and improving post-run analysis.

#### PR Summary
This PR adds detailed debug logging to the mlflow integration and introduces a new `wait_futures` function to streamline the collection of asynchronous task results for improved analysis.
- `flaml/fabric/mlflow.py`: Added debug log statements around starting and ending mlflow runs to trace run IDs and execution flow.
- `flaml/automl/automl.py`: Implemented the `wait_futures` function to handle asynchronous task results and replaced a print call with `logger.info` for consistent logging.

Related work items: #4029592
2025-05-27 15:32:56 +08:00
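The futures-collection pattern the summary describes, shown as a stand-alone sketch; the real helper lives in `flaml/automl/automl.py` and iterates over mlflow-specific futures, so the executor and task names here are illustrative only.

```python
import logging
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

logger = logging.getLogger(__name__)


def wait_futures(futures):
    """Collect results of submitted tasks, logging failures instead of raising."""
    t1 = time.perf_counter()
    for future in as_completed(futures):
        name = futures[future]
        try:
            logger.debug("Result for task %s: %s", name, future.result())
        except Exception as e:
            logger.warning("Exception for task %s: %s", name, e)
    logger.debug("Collected all results in %.3f seconds.", time.perf_counter() - t1)


# Usage: map each future to a label, mirroring the PR summary above.
with ThreadPoolExecutor() as pool:
    pending = {pool.submit(pow, 2, n): f"pow-{n}" for n in range(4)}
    wait_futures(pending)
```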
murunlin
12183e5f73 Add the detailed info for parameter 'verbose' (#1435)
* explain-verbose-parameter

* concise-verbose-docstring

* explain-verbose-parameter

* explain-verbose-parameter

* test-ignore

* test-ignore

* sklearn-version-califonia

* submit-0526

---------

Co-authored-by: Runlin Mu (FESCO Adecco Human Resources) <v-runlinmu@microsoft.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-05-27 10:01:01 +08:00
Li Jiang
c2b25310fc Sync Fabric till 2cd1c3da (#1433)
* Sync Fabric till 2cd1c3da

* Remove synapseml from tag names

* Fix 'NoneType' object has no attribute 'DataFrame'

* Deprecated 3.8 support

* Fix 'NoneType' object has no attribute 'DataFrame'

* Still use python 3.8 for pydoc

* Don't run tests in parallel

* Remove autofe and lowcode
2025-05-23 10:19:31 +08:00
murunlin
0f9420590d fix: best_model_for_estimator returns inconsistent feature_importances_ compared to automl.model (#1429)
* mrl-issue1422-0513

* fix version dependency

* fix datasets version

* test completion

---------

Co-authored-by: Runlin Mu (FESCO Adecco Human Resources) <v-runlinmu@microsoft.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-05-15 09:37:34 +08:00
hexiang-x
5107c506b4 fix: When use_spark = True and mlflow_logging = True are set, an error is reported when logging the best model: 'NoneType' object has no attribute 'save' (#1432) 2025-05-14 19:34:06 +08:00
dependabot[bot]
9e219ef8dc Bump http-proxy-middleware from 2.0.7 to 2.0.9 in /website (#1425)
Bumps [http-proxy-middleware](https://github.com/chimurai/http-proxy-middleware) from 2.0.7 to 2.0.9.
- [Release notes](https://github.com/chimurai/http-proxy-middleware/releases)
- [Changelog](https://github.com/chimurai/http-proxy-middleware/blob/v2.0.9/CHANGELOG.md)
- [Commits](https://github.com/chimurai/http-proxy-middleware/compare/v2.0.7...v2.0.9)

---
updated-dependencies:
- dependency-name: http-proxy-middleware
  dependency-version: 2.0.9
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-04-23 14:22:12 +08:00
Li Jiang
6e4083743b Revert "Numpy 2.x is not supported yet. (#1424)" (#1426)
This reverts commit 17e95edd9e.
2025-04-22 21:31:44 +08:00
Li Jiang
17e95edd9e Numpy 2.x is not supported yet. (#1424) 2025-04-22 12:11:27 +08:00
Stickic-cyber
468bc62d27 Fix issue with "list index out of range" when max_iter=1 (#1419) 2025-04-09 21:54:17 +08:00
dependabot[bot]
437c239c11 Bump @babel/helpers from 7.20.1 to 7.26.10 in /website (#1413)
Bumps [@babel/helpers](https://github.com/babel/babel/tree/HEAD/packages/babel-helpers) from 7.20.1 to 7.26.10.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.26.10/packages/babel-helpers)

---
updated-dependencies:
- dependency-name: "@babel/helpers"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-03-14 15:51:06 +08:00
dependabot[bot]
8e753f1092 Bump @babel/runtime from 7.20.1 to 7.26.10 in /website (#1414)
Bumps [@babel/runtime](https://github.com/babel/babel/tree/HEAD/packages/babel-runtime) from 7.20.1 to 7.26.10.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.26.10/packages/babel-runtime)

---
updated-dependencies:
- dependency-name: "@babel/runtime"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-03-13 21:34:02 +08:00
dependabot[bot]
a3b57e11d4 Bump prismjs from 1.29.0 to 1.30.0 in /website (#1411)
Bumps [prismjs](https://github.com/PrismJS/prism) from 1.29.0 to 1.30.0.
- [Release notes](https://github.com/PrismJS/prism/releases)
- [Changelog](https://github.com/PrismJS/prism/blob/master/CHANGELOG.md)
- [Commits](https://github.com/PrismJS/prism/compare/v1.29.0...v1.30.0)

---
updated-dependencies:
- dependency-name: prismjs
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-03-13 14:06:41 +08:00
dependabot[bot]
a80dcf9925 Bump @babel/runtime-corejs3 from 7.20.1 to 7.26.10 in /website (#1412)
Bumps [@babel/runtime-corejs3](https://github.com/babel/babel/tree/HEAD/packages/babel-runtime-corejs3) from 7.20.1 to 7.26.10.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.26.10/packages/babel-runtime-corejs3)

---
updated-dependencies:
- dependency-name: "@babel/runtime-corejs3"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-13 10:04:03 +08:00
SkBlaz
7157af44e0 Improved error handling in case no scikit present (#1402)
* Improved error handling in case no scikit present

Currently there is no description for when this error is thrown. Being explicit seems of value.

* Update histgb.py

---------

Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-03-03 15:39:43 +08:00
Li Jiang
1798c4591e Upgrade setuptools (#1410) 2025-03-01 08:05:51 +08:00
Li Jiang
dd26263330 Bump version to 2.3.5 (#1409) 2025-02-17 22:26:59 +08:00
Li Jiang
2ba5f8bed1 Fix params pop error (#1408) 2025-02-17 15:06:05 +08:00
Daniel Grindrod
d0a11958a5 fix: Fixed bug where group folds and sample weights couldn't be used in the same automl instance (#1405) 2025-02-15 10:41:27 +08:00
dependabot[bot]
0ef9b00a75 Bump serialize-javascript from 6.0.0 to 6.0.2 in /website (#1407)
Bumps [serialize-javascript](https://github.com/yahoo/serialize-javascript) from 6.0.0 to 6.0.2.
- [Release notes](https://github.com/yahoo/serialize-javascript/releases)
- [Commits](https://github.com/yahoo/serialize-javascript/compare/v6.0.0...v6.0.2)

---
updated-dependencies:
- dependency-name: serialize-javascript
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2025-02-14 12:36:49 +08:00
Will Charles
840f76e5e5 Changed tune.report import for ray>=2 (#1392)
* Changed tune.report import for ray>=2

* env: Changed pydantic restriction in env

* Reverted Pydantic install conditions

* Reverted Pydantic install conditions

* test: Check if GPU is available

* tests: uncommented a line

* tests: Better fix for Ray GPU checking

* tests: Added timeout to dataset loading

* tests: Deleted _test_hf_data()

* test: Reduce lrl2 dataset size

* bug: timeout error

* bug: timeout error

* fix: Added threading check for timeout issue

* Undo old commits

* Timeout fix from #1406

---------

Co-authored-by: Daniel Grindrod <dannycg1996@gmail.com>
2025-02-14 09:38:33 +08:00
Li Jiang
d8b7d25b80 Fix test hang issue (#1406)
* Add try except to resource.setrlimit

* Set time limit only in main thread

* Check only test model

* Pytest debug

* Test separately

* Move test_model.py to automl folder
2025-02-13 19:50:35 +08:00
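A hedged sketch of the two guards these bullets describe, with illustrative names: `resource.setrlimit` is wrapped in try/except because it can fail (or be unavailable) on some platforms, and the signal-based time limit is only installed when running in the main thread, since alarm handlers cannot be set from worker threads.

```python
import signal
import threading

try:
    import resource
except ImportError:  # e.g. Windows ships no resource module
    resource = None


def _timeout_handler(signum, frame):
    raise TimeoutError("per-trial time limit reached")


def apply_limits(mem_bytes=None, time_s=None):
    if resource is not None and mem_bytes:
        try:
            resource.setrlimit(resource.RLIMIT_AS, (mem_bytes, mem_bytes))
        except (ValueError, OSError) as e:  # the try/except the fix adds
            print(f"setrlimit failed, continuing without a memory cap: {e}")
    # Alarm-based time limits only work from the main thread on POSIX systems.
    if time_s and hasattr(signal, "SIGALRM") and threading.current_thread() is threading.main_thread():
        signal.signal(signal.SIGALRM, _timeout_handler)
        signal.alarm(int(time_s))
```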
Li Jiang
6d53929803 Bump version to 2.3.4 (#1389) 2024-12-18 12:49:59 +08:00
Daniel Grindrod
c038fbca07 fix: KeyError no longer occurs when using groupfolds for regression tasks. (#1385)
* fix: Now resetting indexes for regression datasets when using group folds

* refactor: Simplified if statement to include all fold types

* docs: Updated docs to make it clear that group folds can be used for regression tasks

---------

Co-authored-by: Daniel Grindrod <daniel.grindrod@evotec.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2024-12-18 10:06:58 +08:00
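An illustrative call that exercises the combination this fix (and the docs update) covers: group folds on a regression task. Data, budget, and estimator choices are placeholders.

```python
import numpy as np
from flaml import AutoML

rng = np.random.default_rng(0)
X = rng.normal(size=(300, 5))
y = X @ rng.normal(size=5) + rng.normal(scale=0.1, size=300)
groups = rng.integers(0, 10, size=300)  # e.g. one id per subject or site

automl = AutoML()
automl.fit(
    X, y,
    task="regression",
    eval_method="cv",
    split_type="group",  # documented as valid for regression tasks in this PR
    groups=groups,
    time_budget=30,
    estimator_list=["lgbm"],
)
```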
dependabot[bot]
6a99202492 Bump nanoid from 3.3.6 to 3.3.8 in /website (#1387)
Bumps [nanoid](https://github.com/ai/nanoid) from 3.3.6 to 3.3.8.
- [Release notes](https://github.com/ai/nanoid/releases)
- [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ai/nanoid/compare/3.3.6...3.3.8)

---
updated-dependencies:
- dependency-name: nanoid
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Li Jiang <bnujli@gmail.com>
2024-12-17 19:26:34 +08:00
Daniel Grindrod
42d1dcfa0e fix: Fixed bug with catboost and groups (#1383)
Co-authored-by: Daniel Grindrod <daniel.grindrod@evotec.com>
2024-12-17 13:54:49 +08:00
EgorKraevTransferwise
b83c8a7d3b Pass cost_attr and cost_budget from flaml.tune.run() to the search algo (#1382) 2024-12-04 20:50:15 +08:00
dependabot[bot]
b9194cdcf2 Bump cross-spawn from 7.0.3 to 7.0.6 in /website (#1379)
Bumps [cross-spawn](https://github.com/moxystudio/node-cross-spawn) from 7.0.3 to 7.0.6.
- [Changelog](https://github.com/moxystudio/node-cross-spawn/blob/master/CHANGELOG.md)
- [Commits](https://github.com/moxystudio/node-cross-spawn/compare/v7.0.3...v7.0.6)

---
updated-dependencies:
- dependency-name: cross-spawn
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-20 15:48:39 +08:00
Li Jiang
9a1f6b0291 Bump version to 2.3.3 (#1378) 2024-11-13 11:44:34 +08:00
kernelmethod
07f4413aae Fix logging nuisances that can arise when importing flaml (#1377) 2024-11-13 07:49:55 +08:00
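A hedged illustration of the behaviour this fix enables, based on the `flaml/__init__.py` change shown further down: a logger level configured by the application before import is no longer overridden by the package's INFO default.

```python
import logging

# Configure the flaml logger first; the package only applies its INFO
# default when the level is still NOTSET.
logging.getLogger("flaml").setLevel(logging.WARNING)

import flaml  # noqa: E402

print(logging.getLogger("flaml").getEffectiveLevel())  # 30, i.e. WARNING is kept
```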
90 changed files with 51476 additions and 473 deletions


@@ -12,26 +12,17 @@ jobs:
deploy:
strategy:
matrix:
os: ['ubuntu-latest']
python-version: [3.8]
os: ["ubuntu-latest"]
python-version: ["3.10"]
runs-on: ${{ matrix.os }}
environment: package
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Cache conda
uses: actions/cache@v3
uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
path: ~/conda_pkgs_dir
key: conda-${{ matrix.os }}-python-${{ matrix.python-version }}-${{ hashFiles('environment.yml') }}
- name: Setup Miniconda
uses: conda-incubator/setup-miniconda@v2
with:
auto-update-conda: true
auto-activate-base: false
activate-environment: hcrystalball
python-version: ${{ matrix.python-version }}
use-only-tar-bz2: true
- name: Install from source
# This is required for the pre-commit tests
shell: pwsh
@@ -42,7 +33,7 @@ jobs:
- name: Build
shell: pwsh
run: |
pip install twine
pip install twine wheel setuptools
python setup.py sdist bdist_wheel
- name: Publish to PyPI
env:


@@ -37,11 +37,11 @@ jobs:
- name: setup python
uses: actions/setup-python@v4
with:
python-version: "3.8"
python-version: "3.10"
- name: pydoc-markdown install
run: |
python -m pip install --upgrade pip
pip install pydoc-markdown==4.5.0
pip install pydoc-markdown==4.7.0
- name: pydoc-markdown run
run: |
pydoc-markdown
@@ -73,11 +73,11 @@ jobs:
- name: setup python
uses: actions/setup-python@v4
with:
python-version: "3.8"
python-version: "3.10"
- name: pydoc-markdown install
run: |
python -m pip install --upgrade pip
pip install pydoc-markdown==4.5.0
pip install pydoc-markdown==4.7.0
- name: pydoc-markdown run
run: |
pydoc-markdown


@@ -14,10 +14,20 @@ on:
- 'setup.py'
pull_request:
branches: ['main']
paths:
- 'flaml/**'
- 'test/**'
- 'notebook/**'
- '.github/workflows/python-package.yml'
- 'setup.py'
merge_group:
types: [checks_requested]
schedule:
# Every other day at 02:00 UTC
- cron: '0 2 */2 * *'
permissions: {}
permissions:
contents: write
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
@@ -29,8 +39,11 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-2019]
python-version: ["3.8", "3.9", "3.10", "3.11"]
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: ["3.10", "3.11"]
exclude:
- os: macos-latest
python-version: "3.10"
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
@@ -38,7 +51,7 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
- name: On mac, install libomp to facilitate lgbm and xgboost install
if: matrix.os == 'macOS-latest'
if: matrix.os == 'macos-latest'
run: |
brew update
brew install libomp
@@ -50,7 +63,7 @@ jobs:
export LDFLAGS="$LDFLAGS -Wl,-rpath,/usr/local/opt/libomp/lib -L/usr/local/opt/libomp/lib -lomp"
- name: Install packages and dependencies
run: |
python -m pip install --upgrade pip wheel
python -m pip install --upgrade pip wheel setuptools
pip install -e .
python -c "import flaml"
pip install -e .[test]
@@ -64,33 +77,43 @@ jobs:
run: |
pip install pyspark==3.5.1
pip list | grep "pyspark"
- name: If linux and python<3.11, install ray 2
if: matrix.os == 'ubuntu-latest' && matrix.python-version != '3.11'
- name: On Ubuntu python 3.12, install pyspark 4.0.1
if: matrix.python-version == '3.12' && matrix.os == 'ubuntu-latest'
run: |
pip install "ray[tune]<2.5.0"
- name: If mac and python 3.10, install ray and xgboost 1
if: matrix.os == 'macOS-latest' && matrix.python-version == '3.10'
run: |
pip install -e .[ray]
# use macOS to test xgboost 1, but macOS also supports xgboost 2
pip install "xgboost<2"
- name: If linux, install prophet on python < 3.9
if: matrix.os == 'ubuntu-latest' && matrix.python-version == '3.8'
pip install pyspark==4.0.1
pip list | grep "pyspark"
# # TODO: support ray
# - name: If linux and python<3.11, install ray 2
# if: matrix.os == 'ubuntu-latest' && matrix.python-version < '3.11'
# run: |
# pip install "ray[tune]<2.5.0"
- name: Install prophet when on linux
if: matrix.os == 'ubuntu-latest'
run: |
pip install -e .[forecast]
- name: Install vw on python < 3.10
if: matrix.python-version == '3.8' || matrix.python-version == '3.9'
# TODO: support vw for python 3.10+
- name: If linux and python<3.10, install vw
if: matrix.os == 'ubuntu-latest' && matrix.python-version < '3.10'
run: |
pip install -e .[vw]
- name: Pip freeze
run: |
pip freeze
- name: Check dependencies
run: |
python test/check_dependency.py
- name: Clear pip cache
run: |
pip cache purge
- name: Test with pytest
if: matrix.python-version != '3.10'
run: |
pytest test
pytest test/ --ignore=test/autogen --reruns 2 --reruns-delay 10
- name: Coverage
if: matrix.python-version == '3.10'
run: |
pip install coverage
coverage run -a -m pytest test
coverage run -a -m pytest test --ignore=test/autogen --reruns 2 --reruns-delay 10
coverage xml
- name: Upload coverage to Codecov
if: matrix.python-version == '3.10'
@@ -98,28 +121,24 @@ jobs:
with:
file: ./coverage.xml
flags: unittests
- name: Save dependencies
shell: bash
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
git config advice.addIgnoredFile false
# docs:
BRANCH=unit-tests-installed-dependencies
git fetch origin
git checkout -B "$BRANCH"
if git show-ref --verify --quiet "refs/remotes/origin/$BRANCH"; then
git rebase "origin/$BRANCH"
fi
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v3
# - name: Setup Python
# uses: actions/setup-python@v4
# with:
# python-version: '3.8'
# - name: Compile documentation
# run: |
# pip install -e .
# python -m pip install sphinx sphinx_rtd_theme
# cd docs
# make html
# - name: Deploy to GitHub pages
# if: ${{ github.ref == 'refs/heads/main' }}
# uses: JamesIves/github-pages-deploy-action@3.6.2
# with:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# BRANCH: gh-pages
# FOLDER: docs/_build/html
# CLEAN: true
pip freeze > installed_all_dependencies_${{ matrix.python-version }}_${{ matrix.os }}.txt
python test/check_dependency.py > installed_first_tier_dependencies_${{ matrix.python-version }}_${{ matrix.os }}.txt
git add installed_*dependencies*.txt
mv coverage.xml ./coverage_${{ matrix.python-version }}_${{ matrix.os }}.xml || true
git add -f ./coverage_${{ matrix.python-version }}_${{ matrix.os }}.xml || true
git commit -m "Update installed dependencies for Python ${{ matrix.python-version }} on ${{ matrix.os }}" || exit 0
git push origin "$BRANCH"

.gitignore (vendored, 6 changed lines)

@@ -172,7 +172,7 @@ test/default
test/housing.json
test/nlp/default/transformer_ms/seq-classification.json
flaml/fabric/fanova/_fanova.c
flaml/fabric/fanova/*fanova.c
# local config files
*.config.local
@@ -184,3 +184,7 @@ notebook/lightning_logs/
lightning_logs/
flaml/autogen/extensions/tmp/
test/autogen/my_tmp/
catboost_*
# Internal configs
.pypirc


@@ -1,5 +1,5 @@
# basic setup
FROM mcr.microsoft.com/devcontainers/python:3.8
FROM mcr.microsoft.com/devcontainers/python:3.10
RUN apt-get update && apt-get -y update
RUN apt-get install -y sudo git npm


@@ -14,15 +14,9 @@
<br>
</p>
:fire: FLAML supports AutoML and Hyperparameter Tuning in [Microsoft Fabric Data Science](https://learn.microsoft.com/en-us/fabric/data-science/automated-machine-learning-fabric). In addition, we've introduced Python 3.11 support, along with a range of new estimators, and comprehensive integration with MLflow—thanks to contributions from the Microsoft Fabric product team.
:fire: FLAML supports AutoML and Hyperparameter Tuning in [Microsoft Fabric Data Science](https://learn.microsoft.com/en-us/fabric/data-science/automated-machine-learning-fabric). In addition, we've introduced Python 3.11 and 3.12 support, along with a range of new estimators, and comprehensive integration with MLflow—thanks to contributions from the Microsoft Fabric product team.
:fire: Heads-up: We have migrated [AutoGen](https://microsoft.github.io/autogen/) into a dedicated [github repository](https://github.com/microsoft/autogen). Alongside this move, we have also launched a dedicated [Discord](https://discord.gg/pAbnFJrkgZ) server and a [website](https://microsoft.github.io/autogen/) for comprehensive documentation.
:fire: The automated multi-agent chat framework in [AutoGen](https://microsoft.github.io/autogen/) is in preview from v2.0.0.
:fire: FLAML is highlighted in OpenAI's [cookbook](https://github.com/openai/openai-cookbook#related-resources-from-around-the-web).
:fire: [autogen](https://microsoft.github.io/autogen/) is released with support for ChatGPT and GPT-4, based on [Cost-Effective Hyperparameter Optimization for Large Language Model Generation Inference](https://arxiv.org/abs/2303.04673).
:fire: Heads-up: [AutoGen](https://microsoft.github.io/autogen/) has moved to a dedicated [GitHub repository](https://github.com/microsoft/autogen). FLAML no longer includes the `autogen` module—please use AutoGen directly.
## What is FLAML
@@ -30,7 +24,7 @@ FLAML is a lightweight Python library for efficient automation of machine
learning and AI operations. It automates workflow based on large language models, machine learning models, etc.
and optimizes their performance.
- FLAML enables building next-gen GPT-X applications based on multi-agent conversations with minimal effort. It simplifies the orchestration, automation and optimization of a complex GPT-X workflow. It maximizes the performance of GPT-X models and augments their weakness.
- FLAML enables economical automation and tuning for ML/AI workflows, including model selection and hyperparameter optimization under resource constraints.
- For common machine learning tasks like classification and regression, it quickly finds quality models for user-provided data with low computational resources. It is easy to customize or extend. Users can find their desired customizability from a smooth range.
- It supports fast and economical automatic tuning (e.g., inference hyperparameters for foundation models, configurations in MLOps/LMOps workflows, pipelines, mathematical/statistical models, algorithms, computing experiments, software configurations), capable of handling large search space with heterogeneous evaluation cost and complex constraints/guidance/early stopping.
@@ -40,16 +34,16 @@ FLAML has a .NET implementation in [ML.NET](http://dot.net/ml), an open-source,
## Installation
FLAML requires **Python version >= 3.8**. It can be installed from pip:
FLAML requires **Python version >= 3.9**. It can be installed from pip:
```bash
pip install flaml
```
Minimal dependencies are installed without extra options. You can install extra options based on the feature you need. For example, use the following to install the dependencies needed by the [`autogen`](https://microsoft.github.io/autogen/) package.
Minimal dependencies are installed without extra options. You can install extra options based on the feature you need. For example, use the following to install the dependencies needed by the [`automl`](https://microsoft.github.io/FLAML/docs/Use-Cases/Task-Oriented-AutoML) module.
```bash
pip install "flaml[autogen]"
pip install "flaml[automl]"
```
Find more options in [Installation](https://microsoft.github.io/FLAML/docs/Installation).
@@ -57,39 +51,6 @@ Each of the [`notebook examples`](https://github.com/microsoft/FLAML/tree/main/n
## Quickstart
- (New) The [autogen](https://microsoft.github.io/autogen/) package enables the next-gen GPT-X applications with a generic multi-agent conversation framework.
It offers customizable and conversable agents which integrate LLMs, tools and human.
By automating chat among multiple capable agents, one can easily make them collectively perform tasks autonomously or with human feedback, including tasks that require using tools via code. For example,
```python
from flaml import autogen
assistant = autogen.AssistantAgent("assistant")
user_proxy = autogen.UserProxyAgent("user_proxy")
user_proxy.initiate_chat(
assistant,
message="Show me the YTD gain of 10 largest technology companies as of today.",
)
# This initiates an automated chat between the two agents to solve the task
```
Autogen also helps maximize the utility out of the expensive LLMs such as ChatGPT and GPT-4. It offers a drop-in replacement of `openai.Completion` or `openai.ChatCompletion` with powerful functionalites like tuning, caching, templating, filtering. For example, you can optimize generations by LLM with your own tuning data, success metrics and budgets.
```python
# perform tuning
config, analysis = autogen.Completion.tune(
data=tune_data,
metric="success",
mode="max",
eval_func=eval_func,
inference_budget=0.05,
optimization_budget=3,
num_samples=-1,
)
# perform inference for a test instance
response = autogen.Completion.create(context=test_instance, **config)
```
- With three lines of code, you can start using this economical and fast
AutoML engine as a [scikit-learn style estimator](https://microsoft.github.io/FLAML/docs/Use-Cases/Task-Oriented-AutoML).
@@ -111,7 +72,10 @@ automl.fit(X_train, y_train, task="classification", estimator_list=["lgbm"])
```python
from flaml import tune
tune.run(evaluation_function, config={}, low_cost_partial_config={}, time_budget_s=3600)
tune.run(
evaluation_function, config={}, low_cost_partial_config={}, time_budget_s=3600
)
```
- [Zero-shot AutoML](https://microsoft.github.io/FLAML/docs/Use-Cases/Zero-Shot-AutoML) allows using the existing training API from lightgbm, xgboost etc. while getting the benefit of AutoML in choosing high-performance hyperparameter configurations per task.

File diff suppressed because it is too large

coverage_3.11_macos-latest.xml (new file, 11527 lines)

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,4 +1,5 @@
import logging
import warnings
try:
from flaml.automl import AutoML, logger_formatter
@@ -12,7 +13,8 @@ from flaml.version import __version__
# Set the root logger.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
if logger.level == logging.NOTSET:
logger.setLevel(logging.INFO)
if not has_automl:
logger.warning("flaml.automl is not available. Please install flaml[automl] to enable AutoML functionalities.")
warnings.warn("flaml.automl is not available. Please install flaml[automl] to enable AutoML functionalities.")


@@ -156,7 +156,7 @@ class MathUserProxyAgent(UserProxyAgent):
when the number of auto reply reaches the max_consecutive_auto_reply or when is_termination_msg is True.
default_auto_reply (str or dict or None): the default auto reply message when no code execution or llm based reply is generated.
max_invalid_q_per_step (int): (ADDED) the maximum number of invalid queries per step.
**kwargs (dict): other kwargs in [UserProxyAgent](user_proxy_agent#__init__).
**kwargs (dict): other kwargs in [UserProxyAgent](../user_proxy_agent#__init__).
"""
super().__init__(
name=name,


@@ -123,7 +123,7 @@ class RetrieveUserProxyAgent(UserProxyAgent):
can be found at `https://www.sbert.net/docs/pretrained_models.html`. The default model is a
fast model. If you want to use a high performance model, `all-mpnet-base-v2` is recommended.
- customized_prompt (Optional, str): the customized prompt for the retrieve chat. Default is None.
**kwargs (dict): other kwargs in [UserProxyAgent](user_proxy_agent#__init__).
**kwargs (dict): other kwargs in [UserProxyAgent](../user_proxy_agent#__init__).
"""
super().__init__(
name=name,


@@ -10,6 +10,7 @@ import os
import random
import sys
import time
from concurrent.futures import as_completed
from functools import partial
from typing import Callable, List, Optional, Union
@@ -187,9 +188,16 @@ class AutoML(BaseEstimator):
mem_thres: A float of the memory size constraint in bytes.
pred_time_limit: A float of the prediction latency constraint in seconds.
It refers to the average prediction time per row in validation data.
train_time_limit: A float of the training time constraint in seconds.
train_time_limit: None or a float of the training time constraint in seconds for each trial.
Only valid for sequential search.
verbose: int, default=3 | Controls the verbosity, higher means more
messages.
verbose=0: logger level = CRITICAL
verbose=1: logger level = ERROR
verbose=2: logger level = WARNING
verbose=3: logger level = INFO
verbose=4: logger level = DEBUG
verbose>5: logger level = NOTSET
retrain_full: bool or str, default=True | whether to retrain the
selected model on the full training data when using holdout.
True - retrain only after search finishes; False - no retraining;
@@ -203,7 +211,7 @@ class AutoML(BaseEstimator):
* Valid str options depend on different tasks.
For classification tasks, valid choices are
["auto", 'stratified', 'uniform', 'time', 'group']. "auto" -> stratified.
For regression tasks, valid choices are ["auto", 'uniform', 'time'].
For regression tasks, valid choices are ["auto", 'uniform', 'time', 'group'].
"auto" -> uniform.
For time series forecast tasks, must be "auto" or 'time'.
For ranking task, must be "auto" or 'group'.
@@ -393,6 +401,24 @@ class AutoML(BaseEstimator):
self._estimator_type = "classifier" if settings["task"] in CLASSIFICATION else "regressor"
self.best_run_id = None
def __getstate__(self):
"""Customize pickling to avoid serializing runtime-only objects.
MLflow's sklearn flavor serializes estimators via (cloud)pickle. During
AutoML fitting we may attach an internal mlflow integration instance
which holds `concurrent.futures.Future` objects and executors containing
thread locks, which are not picklable.
"""
state = self.__dict__.copy()
state.pop("mlflow_integration", None)
return state
def __setstate__(self, state):
self.__dict__.update(state)
# Ensure attribute exists post-unpickle.
self.mlflow_integration = None
def get_params(self, deep: bool = False) -> dict:
return self._settings.copy()
@@ -424,6 +450,8 @@ class AutoML(BaseEstimator):
If `model_history` was set to True, then the returned model is trained.
"""
state = self._search_states.get(estimator_name)
if state and estimator_name == self._best_estimator:
return self.model
return state and getattr(state, "trained_estimator", None)
@property
@@ -739,7 +767,7 @@ class AutoML(BaseEstimator):
* Valid str options depend on different tasks.
For classification tasks, valid choices are
["auto", 'stratified', 'uniform', 'time', 'group']. "auto" -> stratified.
For regression tasks, valid choices are ["auto", 'uniform', 'time'].
For regression tasks, valid choices are ["auto", 'uniform', 'time', 'group'].
"auto" -> uniform.
For time series forecast tasks, must be "auto" or 'time'.
For ranking task, must be "auto" or 'group'.
@@ -1332,7 +1360,8 @@ class AutoML(BaseEstimator):
mem_thres: A float of the memory size constraint in bytes.
pred_time_limit: A float of the prediction latency constraint in seconds.
It refers to the average prediction time per row in validation data.
train_time_limit: None or a float of the training time constraint in seconds.
train_time_limit: None or a float of the training time constraint in seconds for each trial.
Only valid for sequential search.
X_val: None or a numpy array or a pandas dataframe of validation data.
y_val: None or a numpy array or a pandas series of validation labels.
sample_weight_val: None or a numpy array of the sample weight of
@@ -1345,6 +1374,12 @@ class AutoML(BaseEstimator):
for training data.
verbose: int, default=3 | Controls the verbosity, higher means more
messages.
verbose=0: logger level = CRITICAL
verbose=1: logger level = ERROR
verbose=2: logger level = WARNING
verbose=3: logger level = INFO
verbose=4: logger level = DEBUG
verbose>5: logger level = NOTSET
retrain_full: bool or str, default=True | whether to retrain the
selected model on the full training data when using holdout.
True - retrain only after search finishes; False - no retraining;
@@ -1358,7 +1393,7 @@ class AutoML(BaseEstimator):
* Valid str options depend on different tasks.
For classification tasks, valid choices are
["auto", 'stratified', 'uniform', 'time', 'group']. "auto" -> stratified.
For regression tasks, valid choices are ["auto", 'uniform', 'time'].
For regression tasks, valid choices are ["auto", 'uniform', 'time', 'group'].
"auto" -> uniform.
For time series forecast tasks, must be "auto" or 'time'.
For ranking task, must be "auto" or 'group'.
@@ -1623,6 +1658,13 @@ class AutoML(BaseEstimator):
_ch.setFormatter(logger_formatter)
logger.addHandler(_ch)
if model_history:
logger.warning(
"With `model_history` set to `True` by default, all intermediate models are retained in memory, "
"which may significantly increase memory usage and slow down training. "
"Consider setting `model_history=False` to optimize memory and accelerate the training process."
)
if not use_ray and not use_spark and n_concurrent_trials > 1:
if ray_available:
logger.warning(
@@ -1708,7 +1750,7 @@ class AutoML(BaseEstimator):
if not (mlflow.active_run() is not None or is_autolog_enabled()):
self.mlflow_integration.only_history = True
except KeyError:
print("Not in Fabric, Skipped")
logger.info("Not in Fabric, Skipped")
task.validate_data(
self,
self._state,
@@ -2529,6 +2571,21 @@ class AutoML(BaseEstimator):
self._selected = state = self._search_states[estimator]
state.best_config_sample_size = self._state.data_size[0]
state.best_config = state.init_config[0] if state.init_config else {}
self._track_iter = 0
self._config_history[self._track_iter] = (estimator, state.best_config, self._state.time_from_start)
self._best_iteration = self._track_iter
state.val_loss = getattr(state, "val_loss", float("inf"))
state.best_loss = getattr(state, "best_loss", float("inf"))
state.config = getattr(state, "config", state.best_config.copy())
state.metric_for_logging = getattr(state, "metric_for_logging", None)
state.sample_size = getattr(state, "sample_size", self._state.data_size[0])
state.learner_class = getattr(state, "learner_class", self._state.learner_classes.get(estimator))
if hasattr(self, "mlflow_integration") and self.mlflow_integration:
self.mlflow_integration.record_state(
automl=self,
search_state=state,
estimator=estimator,
)
elif self._use_ray is False and self._use_spark is False:
self._search_sequential()
else:
@@ -2700,16 +2757,47 @@ class AutoML(BaseEstimator):
):
if mlflow.active_run() is None:
mlflow.start_run(run_id=self.mlflow_integration.parent_run_id)
self.mlflow_integration.log_model(
self._trained_estimator.model,
self.best_estimator,
signature=self.estimator_signature,
)
self.mlflow_integration.pickle_and_log_automl_artifacts(
self, self.model, self.best_estimator, signature=self.pipeline_signature
)
if self.best_estimator.endswith("_spark"):
self.mlflow_integration.log_model(
self._trained_estimator.model,
self.best_estimator,
signature=self.estimator_signature,
run_id=self.mlflow_integration.parent_run_id,
)
else:
self.mlflow_integration.pickle_and_log_automl_artifacts(
self,
self.model,
self.best_estimator,
signature=self.pipeline_signature,
run_id=self.mlflow_integration.parent_run_id,
)
else:
logger.info("not retraining because the time budget is too small.")
logger.warning("not retraining because the time budget is too small.")
self.wait_futures()
def wait_futures(self):
if self.mlflow_integration is not None:
logger.debug("Collecting results from submitted record_state tasks")
t1 = time.perf_counter()
for future in as_completed(self.mlflow_integration.futures):
_task = self.mlflow_integration.futures[future]
try:
result = future.result()
logger.debug(f"Result for record_state task {_task}: {result}")
except Exception as e:
logger.warning(f"Exception for record_state task {_task}: {e}")
for future in as_completed(self.mlflow_integration.futures_log_model):
_task = self.mlflow_integration.futures_log_model[future]
try:
result = future.result()
logger.debug(f"Result for log_model task {_task}: {result}")
except Exception as e:
logger.warning(f"Exception for log_model task {_task}: {e}")
t2 = time.perf_counter()
logger.debug(f"Collecting results from tasks submitted to executors costs {t2-t1} seconds.")
else:
logger.debug("No futures to wait for.")
def __del__(self):
if (


@@ -1,7 +1,7 @@
try:
from sklearn.ensemble import HistGradientBoostingClassifier, HistGradientBoostingRegressor
except ImportError:
pass
except ImportError as e:
print(f"scikit-learn is required for HistGradientBoostingEstimator. Please install it; error: {e}")
from flaml import tune
from flaml.automl.model import SKLearnEstimator


@@ -2,13 +2,17 @@
# * Copyright (c) Microsoft Corporation. All rights reserved.
# * Licensed under the MIT License. See LICENSE file in the
# * project root for license information.
import json
import os
from datetime import datetime
import random
import uuid
from datetime import datetime, timedelta
from decimal import ROUND_HALF_UP, Decimal
from typing import TYPE_CHECKING, Union
import numpy as np
from flaml.automl.spark import DataFrame, Series, pd, ps, psDataFrame, psSeries
from flaml.automl.spark import DataFrame, F, Series, T, pd, ps, psDataFrame, psSeries
from flaml.automl.training_log import training_log_reader
try:
@@ -19,6 +23,7 @@ except ImportError:
if TYPE_CHECKING:
from flaml.automl.task import Task
TS_TIMESTAMP_COL = "ds"
TS_VALUE_COL = "y"
@@ -45,7 +50,10 @@ def load_openml_dataset(dataset_id, data_dir=None, random_state=0, dataset_forma
"""
import pickle
import openml
try:
import openml
except ImportError:
openml = None
from sklearn.model_selection import train_test_split
filename = "openml_ds" + str(dataset_id) + ".pkl"
@@ -56,15 +64,15 @@ def load_openml_dataset(dataset_id, data_dir=None, random_state=0, dataset_forma
dataset = pickle.load(f)
else:
print("download dataset from openml")
dataset = openml.datasets.get_dataset(dataset_id)
dataset = openml.datasets.get_dataset(dataset_id) if openml else None
if not os.path.exists(data_dir):
os.makedirs(data_dir)
with open(filepath, "wb") as f:
pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
print("Dataset name:", dataset.name)
print("Dataset name:", dataset.name) if dataset else None
try:
X, y, *__ = dataset.get_data(target=dataset.default_target_attribute, dataset_format=dataset_format)
except ValueError:
except (ValueError, AttributeError, TypeError):
from sklearn.datasets import fetch_openml
X, y = fetch_openml(data_id=dataset_id, return_X_y=True)
@@ -445,3 +453,331 @@ class DataTransformer:
def group_counts(groups):
_, i, c = np.unique(groups, return_counts=True, return_index=True)
return c[np.argsort(i)]
def get_random_dataframe(n_rows: int = 200, ratio_none: float = 0.1, seed: int = 42) -> DataFrame:
"""Generate a random pandas DataFrame with various data types for testing.
This function creates a DataFrame with multiple column types including:
- Timestamps
- Integers
- Floats
- Categorical values
- Booleans
- Lists (tags)
- Decimal strings
- UUIDs
- Binary data (as hex strings)
- JSON blobs
- Nullable text fields
Parameters
----------
n_rows : int, default=200
Number of rows in the generated DataFrame
ratio_none : float, default=0.1
Probability of generating None values in applicable columns
seed : int, default=42
Random seed for reproducibility
Returns
-------
pd.DataFrame
A DataFrame with 14 columns of various data types
Examples
--------
>>> df = get_random_dataframe(100, 0.05, 123)
>>> df.shape
(100, 14)
>>> df.dtypes
timestamp datetime64[ns]
id int64
score float64
status object
flag object
count object
value object
tags object
rating object
uuid object
binary object
json_blob object
category category
nullable_text object
dtype: object
"""
np.random.seed(seed)
random.seed(seed)
def random_tags():
tags = ["AI", "ML", "data", "robotics", "vision"]
return random.sample(tags, k=random.randint(1, 3)) if random.random() > ratio_none else None
def random_decimal():
return (
str(Decimal(random.uniform(1, 5)).quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))
if random.random() > ratio_none
else None
)
def random_json_blob():
blob = {"a": random.randint(1, 10), "b": random.random()}
return json.dumps(blob) if random.random() > ratio_none else None
def random_binary():
return bytes(random.randint(0, 255) for _ in range(4)).hex() if random.random() > ratio_none else None
data = {
"timestamp": [
datetime(2020, 1, 1) + timedelta(days=np.random.randint(0, 1000)) if np.random.rand() > ratio_none else None
for _ in range(n_rows)
],
"id": range(1, n_rows + 1),
"score": np.random.uniform(0, 100, n_rows),
"status": np.random.choice(
["active", "inactive", "pending", None],
size=n_rows,
p=[(1 - ratio_none) / 3, (1 - ratio_none) / 3, (1 - ratio_none) / 3, ratio_none],
),
"flag": np.random.choice(
[True, False, None], size=n_rows, p=[(1 - ratio_none) / 2, (1 - ratio_none) / 2, ratio_none]
),
"count": [np.random.randint(0, 100) if np.random.rand() > ratio_none else None for _ in range(n_rows)],
"value": [round(np.random.normal(50, 15), 2) if np.random.rand() > ratio_none else None for _ in range(n_rows)],
"tags": [random_tags() for _ in range(n_rows)],
"rating": [random_decimal() for _ in range(n_rows)],
"uuid": [str(uuid.uuid4()) if np.random.rand() > ratio_none else None for _ in range(n_rows)],
"binary": [random_binary() for _ in range(n_rows)],
"json_blob": [random_json_blob() for _ in range(n_rows)],
"category": pd.Categorical(
np.random.choice(
["A", "B", "C", None],
size=n_rows,
p=[(1 - ratio_none) / 3, (1 - ratio_none) / 3, (1 - ratio_none) / 3, ratio_none],
)
),
"nullable_text": [random.choice(["Good", "Bad", "Average", None]) for _ in range(n_rows)],
}
return pd.DataFrame(data)
def auto_convert_dtypes_spark(
df: psDataFrame,
na_values: list = None,
category_threshold: float = 0.3,
convert_threshold: float = 0.6,
sample_ratio: float = 0.1,
) -> tuple[psDataFrame, dict]:
"""Automatically convert data types in a PySpark DataFrame using heuristics.
This function analyzes a sample of the DataFrame to infer appropriate data types
and applies the conversions. It handles timestamps, numeric values, booleans,
and categorical fields.
Args:
df: A PySpark DataFrame to convert.
na_values: List of strings to be considered as NA/NaN. Defaults to
['NA', 'na', 'NULL', 'null', ''].
category_threshold: Maximum ratio of unique values to total values
to consider a column categorical. Defaults to 0.3.
convert_threshold: Minimum ratio of successfully converted values required
to apply a type conversion. Defaults to 0.6.
sample_ratio: Fraction of data to sample for type inference. Defaults to 0.1.
Returns:
tuple: (The DataFrame with converted types, A dictionary mapping column names to
their inferred types as strings)
Note:
- 'category' in the schema dict is conceptual as PySpark doesn't have a true
category type like pandas
- The function uses sampling for efficiency with large datasets
"""
n_rows = df.count()
if na_values is None:
na_values = ["NA", "na", "NULL", "null", ""]
# Normalize NA-like values
for colname, coltype in df.dtypes:
if coltype == "string":
df = df.withColumn(
colname,
F.when(F.trim(F.lower(F.col(colname))).isin([v.lower() for v in na_values]), None).otherwise(
F.col(colname)
),
)
schema = {}
for colname in df.columns:
# Sample once at an appropriate ratio
sample_ratio_to_use = min(1.0, sample_ratio if n_rows * sample_ratio > 100 else 100 / n_rows)
col_sample = df.select(colname).sample(withReplacement=False, fraction=sample_ratio_to_use).dropna()
sample_count = col_sample.count()
inferred_type = "string" # Default
if col_sample.dtypes[0][1] != "string":
schema[colname] = col_sample.dtypes[0][1]
continue
if sample_count == 0:
schema[colname] = "string"
continue
# Check if timestamp
ts_col = col_sample.withColumn("parsed", F.to_timestamp(F.col(colname)))
# Check numeric
if (
col_sample.withColumn("n", F.col(colname).cast("double")).filter("n is not null").count()
>= sample_count * convert_threshold
):
# All whole numbers?
all_whole = (
col_sample.withColumn("n", F.col(colname).cast("double"))
.filter("n is not null")
.withColumn("frac", F.abs(F.col("n") % 1))
.filter("frac > 0.000001")
.count()
== 0
)
inferred_type = "int" if all_whole else "double"
# Check low-cardinality (category-like)
elif (
sample_count > 0
and col_sample.select(F.countDistinct(F.col(colname))).collect()[0][0] / sample_count <= category_threshold
):
inferred_type = "category" # Will just be string, but marked as such
# Check if timestamp
elif ts_col.filter(F.col("parsed").isNotNull()).count() >= sample_count * convert_threshold:
inferred_type = "timestamp"
schema[colname] = inferred_type
# Apply inferred schema
for colname, inferred_type in schema.items():
if inferred_type == "int":
df = df.withColumn(colname, F.col(colname).cast(T.IntegerType()))
elif inferred_type == "double":
df = df.withColumn(colname, F.col(colname).cast(T.DoubleType()))
elif inferred_type == "boolean":
df = df.withColumn(
colname,
F.when(F.lower(F.col(colname)).isin("true", "yes", "1"), True)
.when(F.lower(F.col(colname)).isin("false", "no", "0"), False)
.otherwise(None),
)
elif inferred_type == "timestamp":
df = df.withColumn(colname, F.to_timestamp(F.col(colname)))
elif inferred_type == "category":
df = df.withColumn(colname, F.col(colname).cast(T.StringType())) # Marked conceptually
# otherwise keep as string (or original type)
return df, schema
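A minimal usage sketch for auto_convert_dtypes_spark, assuming an active SparkSession; the import path and the sample data are illustrative, not taken from the diff:

from pyspark.sql import SparkSession
from flaml.automl.data import auto_convert_dtypes_spark  # assumed import path

spark = SparkSession.builder.getOrCreate()
sdf = spark.createDataFrame(
    [("1", "2.5", "2021-01-01", "yes"), ("2", "NA", "2021-01-02", "no")],
    ["id", "score", "day", "flag"],
)
converted, schema = auto_convert_dtypes_spark(sdf, sample_ratio=1.0)
# On this tiny sample, "id" is likely inferred as int, "score" as double,
# "day" as timestamp, while "flag" stays a string.
print(schema)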
def auto_convert_dtypes_pandas(
df: DataFrame,
na_values: list = None,
category_threshold: float = 0.3,
convert_threshold: float = 0.6,
sample_ratio: float = 1.0,
) -> tuple[DataFrame, dict]:
"""Automatically convert data types in a pandas DataFrame using heuristics.
This function analyzes the DataFrame to infer appropriate data types
and applies the conversions. It handles timestamps, timedeltas, numeric values,
and categorical fields.
Args:
df: A pandas DataFrame to convert.
na_values: List of strings to be considered as NA/NaN. Defaults to
['NA', 'na', 'NULL', 'null', ''].
category_threshold: Maximum ratio of unique values to total values
to consider a column categorical. Defaults to 0.3.
convert_threshold: Minimum ratio of successfully converted values required
to apply a type conversion. Defaults to 0.6.
sample_ratio: Fraction of data to sample for type inference. Not used in pandas version
but included for API compatibility. Defaults to 1.0.
Returns:
tuple: (The DataFrame with converted types, A dictionary mapping column names to
their inferred types as strings)
"""
if na_values is None:
na_values = {"NA", "na", "NULL", "null", ""}
df_converted = df.convert_dtypes()
schema = {}
# Sample if needed (for API compatibility)
if sample_ratio < 1.0:
df = df.sample(frac=sample_ratio)
n_rows = len(df)
for col in df.columns:
series = df[col]
# Replace NA-like values if string
series_cleaned = series.map(lambda x: np.nan if isinstance(x, str) and x.strip() in na_values else x)
# Skip conversion if already non-object data type, except bool which can potentially be categorical
if (
not isinstance(series_cleaned.dtype, pd.BooleanDtype)
and not isinstance(series_cleaned.dtype, pd.StringDtype)
and series_cleaned.dtype != "object"
):
# Keep the original data type for non-object dtypes
df_converted[col] = series
schema[col] = str(series_cleaned.dtype)
continue
# print(f"type: {series_cleaned.dtype}, column: {series_cleaned.name}")
if not isinstance(series_cleaned.dtype, pd.BooleanDtype):
# Try numeric (int or float)
numeric = pd.to_numeric(series_cleaned, errors="coerce")
if numeric.notna().sum() >= n_rows * convert_threshold:
if (numeric.dropna() % 1 == 0).all():
try:
df_converted[col] = numeric.astype("int") # Nullable integer
schema[col] = "int"
continue
except Exception:
pass
df_converted[col] = numeric.astype("double")
schema[col] = "double"
continue
# Try datetime
datetime_converted = pd.to_datetime(series_cleaned, errors="coerce")
if datetime_converted.notna().sum() >= n_rows * convert_threshold:
df_converted[col] = datetime_converted
schema[col] = "timestamp"
continue
# Try timedelta
try:
timedelta_converted = pd.to_timedelta(series_cleaned, errors="coerce")
if timedelta_converted.notna().sum() >= n_rows * convert_threshold:
df_converted[col] = timedelta_converted
schema[col] = "timedelta"
continue
except TypeError:
pass
# Try category
try:
unique_ratio = series_cleaned.nunique(dropna=True) / n_rows if n_rows > 0 else 1.0
if unique_ratio <= category_threshold:
df_converted[col] = series_cleaned.astype("category")
schema[col] = "category"
continue
except Exception:
pass
df_converted[col] = series_cleaned.astype("string")
schema[col] = "string"
return df_converted, schema
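Similarly, a hedged sketch pairing the two pandas helpers added above; the import path is an assumption:

from flaml.automl.data import auto_convert_dtypes_pandas, get_random_dataframe  # assumed import path

df = get_random_dataframe(n_rows=100, ratio_none=0.05, seed=7)
df_as_text = df.astype("string")  # pretend every column arrived as text
converted, schema = auto_convert_dtypes_pandas(df_as_text, convert_threshold=0.6)
# "score" is likely re-inferred as double and "timestamp" as timestamp;
# low-cardinality columns such as "status" typically come back as category.
print(schema)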

View File

@@ -1,7 +1,37 @@
import logging
import os
class ColoredFormatter(logging.Formatter):
# ANSI escape codes for colors
COLORS = {
# logging.DEBUG: "\033[36m", # Cyan
# logging.INFO: "\033[32m", # Green
logging.WARNING: "\033[33m", # Yellow
logging.ERROR: "\033[31m", # Red
logging.CRITICAL: "\033[1;31m", # Bright Red
}
RESET = "\033[0m" # Reset to default
def __init__(self, fmt, datefmt, use_color=True):
super().__init__(fmt, datefmt)
self.use_color = use_color
def format(self, record):
formatted = super().format(record)
if self.use_color:
color = self.COLORS.get(record.levelno, "")
if color:
return f"{color}{formatted}{self.RESET}"
return formatted
logger = logging.getLogger(__name__)
logger_formatter = logging.Formatter(
"[%(name)s: %(asctime)s] {%(lineno)d} %(levelname)s - %(message)s", "%m-%d %H:%M:%S"
use_color = True
if os.getenv("FLAML_LOG_NO_COLOR"):
use_color = False
logger_formatter = ColoredFormatter(
"[%(name)s: %(asctime)s] {%(lineno)d} %(levelname)s - %(message)s", "%m-%d %H:%M:%S", use_color
)
logger.propagate = False
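A short sketch of how the new formatter behaves; the import path is an assumption, and setting FLAML_LOG_NO_COLOR before import disables the ANSI codes:

import logging
import sys

from flaml.automl.logger import ColoredFormatter  # assumed import path

demo = logging.getLogger("color_demo")
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(
    ColoredFormatter(
        "[%(name)s: %(asctime)s] {%(lineno)d} %(levelname)s - %(message)s",
        "%m-%d %H:%M:%S",
        use_color=True,
    )
)
demo.addHandler(handler)
demo.warning("shown in yellow")  # WARNING maps to an ANSI yellow code
demo.error("shown in red")       # ERROR maps to an ANSI red code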

View File

@@ -127,9 +127,21 @@ def metric_loss_score(
import datasets
datasets_metric_name = huggingface_submetric_to_metric.get(metric_name, metric_name.split(":")[0])
metric = datasets.load_metric(datasets_metric_name, trust_remote_code=True)
metric_mode = huggingface_metric_to_mode[datasets_metric_name]
# datasets>=3 removed load_metric; prefer evaluate if available
try:
import evaluate
metric = evaluate.load(datasets_metric_name, trust_remote_code=True)
except Exception:
if hasattr(datasets, "load_metric"):
metric = datasets.load_metric(datasets_metric_name, trust_remote_code=True)
else:
from datasets import load_metric as _load_metric # older datasets
metric = _load_metric(datasets_metric_name, trust_remote_code=True)
if metric_name.startswith("seqeval"):
y_processed_true = [[labels[tr] for tr in each_list] for each_list in y_processed_true]
elif metric in ("pearsonr", "spearmanr"):
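The evaluate/datasets fallback a few lines above can be exercised on its own; a hedged sketch, with "accuracy" used purely as an example metric:

try:
    import evaluate  # preferred; datasets>=3 dropped load_metric
    metric = evaluate.load("accuracy")
except Exception:
    import datasets
    metric = datasets.load_metric("accuracy")  # older datasets releases

result = metric.compute(predictions=[0, 1, 1], references=[0, 1, 0])
print(result)  # roughly {'accuracy': 0.6666...}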

View File

@@ -9,6 +9,7 @@ import os
import shutil
import signal
import sys
import threading
import time
import warnings
from contextlib import contextmanager
@@ -89,24 +90,28 @@ def limit_resource(memory_limit, time_limit):
except ValueError:
# According to https://bugs.python.org/issue40518, it's a mac-specific error.
pass
main_thread = False
if time_limit is not None:
alarm_set = False
if time_limit is not None and threading.current_thread() is threading.main_thread():
try:
signal.signal(signal.SIGALRM, TimeoutHandler)
signal.alarm(int(time_limit) or 1)
main_thread = True
alarm_set = True
except ValueError:
pass
try:
yield
finally:
if main_thread:
if alarm_set:
signal.alarm(0)
if memory_limit > 0:
resource.setrlimit(resource.RLIMIT_AS, (soft, hard))
try:
resource.setrlimit(resource.RLIMIT_AS, (soft, hard))
except ValueError:
pass
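Why the new guard matters: signal.signal raises ValueError outside the main thread, so the alarm-based time limit is only installed when limit_resource runs on the main thread. A minimal reproduction (POSIX only):

import signal
import threading

def try_set_alarm():
    try:
        signal.signal(signal.SIGALRM, lambda signum, frame: None)
        signal.alarm(0)  # cancel immediately; we only test installation
        print("alarm handler installed")
    except ValueError as exc:
        print(f"cannot install alarm here: {exc}")

try_set_alarm()  # main thread: succeeds
worker = threading.Thread(target=try_set_alarm)
worker.start()   # worker thread: prints the ValueError
worker.join()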
class BaseEstimator:
class BaseEstimator(sklearn.base.ClassifierMixin, sklearn.base.BaseEstimator):
"""The abstract class for all learners.
Typical examples:
@@ -130,7 +135,7 @@ class BaseEstimator:
self._task = task if isinstance(task, Task) else task_factory(task, None, None)
self.params = self.config2params(config)
self.estimator_class = self._model = None
if "_estimator_type" in config:
if "_estimator_type" in self.params:
self._estimator_type = self.params.pop("_estimator_type")
else:
self._estimator_type = "classifier" if self._task.is_classification() else "regressor"
@@ -1691,7 +1696,7 @@ class XGBoostEstimator(SKLearnEstimator):
# use_label_encoder is deprecated in 1.7.
if xgboost_version < "1.7.0":
params["use_label_encoder"] = params.get("use_label_encoder", False)
if "n_jobs" in config:
if "n_jobs" in params:
params["nthread"] = params.pop("n_jobs")
return params
@@ -1891,7 +1896,7 @@ class RandomForestEstimator(SKLearnEstimator, LGBMEstimator):
params = super().config2params(config)
if "max_leaves" in params:
params["max_leaf_nodes"] = params.get("max_leaf_nodes", params.pop("max_leaves"))
if not self._task.is_classification() and "criterion" in config:
if not self._task.is_classification() and "criterion" in params:
params.pop("criterion")
if "random_state" not in params:
params["random_state"] = 12032022
@@ -2066,8 +2071,8 @@ class CatBoostEstimator(BaseEstimator):
self.estimator_class = CatBoostRegressor
def fit(self, X_train, y_train, budget=None, free_mem_ratio=0, **kwargs):
if "is_retrain" in kwargs:
kwargs.pop("is_retrain")
kwargs.pop("is_retrain", None)
kwargs.pop("groups", None)
start_time = time.time()
deadline = start_time + budget if budget else np.inf
train_dir = f"catboost_{str(start_time)}"
@@ -2344,7 +2349,7 @@ class SGDEstimator(SKLearnEstimator):
params["loss"] = params.get("loss", None)
if params["loss"] is None and self._task.is_classification():
params["loss"] = "log_loss" if SKLEARN_VERSION >= "1.1" else "log"
if not self._task.is_classification():
if not self._task.is_classification() and "n_jobs" in params:
params.pop("n_jobs")
if params.get("penalty") != "elasticnet":
@@ -2815,7 +2820,7 @@ class suppress_stdout_stderr:
# Open a pair of null files
self.null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)]
# Save the actual stdout (1) and stderr (2) file descriptors.
self.save_fds = (os.dup(1), os.dup(2))
self.save_fds = [os.dup(1), os.dup(2)]
def __enter__(self):
# Assign the null pointers to stdout and stderr.
@@ -2827,5 +2832,5 @@ class suppress_stdout_stderr:
os.dup2(self.save_fds[0], 1)
os.dup2(self.save_fds[1], 2)
# Close the null files
os.close(self.null_fds[0])
os.close(self.null_fds[1])
for fd in self.null_fds + self.save_fds:
os.close(fd)
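A usage sketch for the context manager above (the change also closes the duplicated descriptors kept in save_fds); the import path is an assumption:

import os

from flaml.automl.model import suppress_stdout_stderr  # assumed import path

with suppress_stdout_stderr():
    os.write(1, b"swallowed by /dev/null\n")  # even fd-level writes are muted
print("stdout and stderr are restored here")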

View File

@@ -77,6 +77,14 @@ class TrainingArgumentsForAuto(TrainingArguments):
logging_steps: int = field(default=500, metadata={"help": "Log every X updates steps."})
# Newer versions of HuggingFace Transformers may access `TrainingArguments.generation_config`
# (e.g., in generation-aware trainers/callbacks). Keep this attribute to remain compatible
# while defaulting to None for non-generation tasks.
generation_config: Optional[object] = field(
default=None,
metadata={"help": "Optional generation config (or path) used by generation-aware trainers."},
)
@staticmethod
def load_args_from_console():
from dataclasses import fields

View File

@@ -442,8 +442,8 @@ class GenericTask(Task):
X_train_all, y_train_all = shuffle(X_train_all, y_train_all, random_state=RANDOM_SEED)
if data_is_df:
X_train_all.reset_index(drop=True, inplace=True)
if isinstance(y_train_all, pd.Series):
y_train_all.reset_index(drop=True, inplace=True)
if isinstance(y_train_all, pd.Series):
y_train_all.reset_index(drop=True, inplace=True)
X_train, y_train = X_train_all, y_train_all
state.groups_all = state.groups
@@ -746,7 +746,10 @@ class GenericTask(Task):
elif isinstance(kf, TimeSeriesSplit):
kf = kf.split(X_train_split, y_train_split)
else:
kf = kf.split(X_train_split)
try:
kf = kf.split(X_train_split)
except TypeError:
kf = kf.split(X_train_split, y_train_split)
for train_index, val_index in kf:
if shuffle:
@@ -769,10 +772,10 @@ class GenericTask(Task):
if not is_spark_dataframe:
y_train, y_val = y_train_split[train_index], y_train_split[val_index]
if weight is not None:
fit_kwargs["sample_weight"], weight_val = (
weight[train_index],
weight[val_index],
fit_kwargs["sample_weight"] = (
weight[train_index] if isinstance(weight, np.ndarray) else weight.iloc[train_index]
)
weight_val = weight[val_index] if isinstance(weight, np.ndarray) else weight.iloc[val_index]
if groups is not None:
fit_kwargs["groups"] = (
groups[train_index] if isinstance(groups, np.ndarray) else groups.iloc[train_index]

View File

@@ -192,7 +192,7 @@ class Task(ABC):
* Valid str options depend on different tasks.
For classification tasks, valid choices are
["auto", 'stratified', 'uniform', 'time', 'group']. "auto" -> stratified.
For regression tasks, valid choices are ["auto", 'uniform', 'time'].
For regression tasks, valid choices are ["auto", 'uniform', 'time', 'group'].
"auto" -> uniform.
For time series forecast tasks, must be "auto" or 'time'.
For ranking task, must be "auto" or 'group'.

View File

@@ -529,7 +529,7 @@ def remove_ts_duplicates(
duplicates = X.duplicated()
if any(duplicates):
logger.warning("Duplicate timestamp values found in timestamp column. " f"\n{X.loc[duplicates, X][time_col]}")
logger.warning("Duplicate timestamp values found in timestamp column. " f"\n{X.loc[duplicates, time_col]}")
X = X.drop_duplicates()
logger.warning("Removed duplicate rows based on all columns")
assert (

View File

@@ -1,3 +1,4 @@
import inspect
import time
try:
@@ -106,12 +107,17 @@ class TemporalFusionTransformerEstimator(TimeSeriesEstimator):
def fit(self, X_train, y_train, budget=None, **kwargs):
import warnings
import pytorch_lightning as pl
try:
import lightning.pytorch as pl
from lightning.pytorch.callbacks import EarlyStopping, LearningRateMonitor
from lightning.pytorch.loggers import TensorBoardLogger
except ImportError:
import pytorch_lightning as pl
from pytorch_lightning.callbacks import EarlyStopping, LearningRateMonitor
from pytorch_lightning.loggers import TensorBoardLogger
import torch
from pytorch_forecasting import TemporalFusionTransformer
from pytorch_forecasting.metrics import QuantileLoss
from pytorch_lightning.callbacks import EarlyStopping, LearningRateMonitor
from pytorch_lightning.loggers import TensorBoardLogger
# a bit of monkey patching to fix the MacOS test
# all the log_prediction method appears to do is plot stuff, which ?breaks github tests
@@ -132,12 +138,26 @@ class TemporalFusionTransformerEstimator(TimeSeriesEstimator):
lr_logger = LearningRateMonitor() # log the learning rate
logger = TensorBoardLogger(kwargs.get("log_dir", "lightning_logs")) # logging results to a tensorboard
default_trainer_kwargs = dict(
gpus=self._kwargs.get("gpu_per_trial", [0]) if torch.cuda.is_available() else None,
max_epochs=max_epochs,
gradient_clip_val=gradient_clip_val,
callbacks=[lr_logger, early_stop_callback],
logger=logger,
)
# PyTorch Lightning >=2.0 replaced `gpus` with `accelerator`/`devices`.
# Also, passing `gpus=None` is not accepted on newer versions.
trainer_sig_params = inspect.signature(pl.Trainer.__init__).parameters
if torch.cuda.is_available() and "gpus" in trainer_sig_params:
gpus = self._kwargs.get("gpu_per_trial", None)
if gpus is not None:
default_trainer_kwargs["gpus"] = gpus
elif torch.cuda.is_available() and "devices" in trainer_sig_params:
devices = self._kwargs.get("gpu_per_trial", None)
if devices == -1:
devices = "auto"
if devices is not None:
default_trainer_kwargs["accelerator"] = "gpu"
default_trainer_kwargs["devices"] = devices
trainer = pl.Trainer(
**default_trainer_kwargs,
)
@@ -157,7 +177,14 @@ class TemporalFusionTransformerEstimator(TimeSeriesEstimator):
val_dataloaders=val_dataloader,
)
best_model_path = trainer.checkpoint_callback.best_model_path
best_tft = TemporalFusionTransformer.load_from_checkpoint(best_model_path)
# PyTorch 2.6 changed `torch.load` default `weights_only` from False -> True.
# Some Lightning checkpoints (including those produced here) can require full unpickling.
# This path is generated locally during training, so it's trusted.
load_sig_params = inspect.signature(TemporalFusionTransformer.load_from_checkpoint).parameters
if "weights_only" in load_sig_params:
best_tft = TemporalFusionTransformer.load_from_checkpoint(best_model_path, weights_only=False)
else:
best_tft = TemporalFusionTransformer.load_from_checkpoint(best_model_path)
train_time = time.time() - current_time
self._model = best_tft
return train_time
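The checkpoint-loading change above relies on a general compatibility trick: inspect the callable's signature and only pass a keyword it actually accepts. A hedged, generic sketch of that pattern (the loader below is a stand-in, not the real API):

import inspect

def call_with_supported_kwargs(fn, *args, **candidate_kwargs):
    """Pass only the keyword arguments that fn's signature accepts."""
    accepted = inspect.signature(fn).parameters
    kwargs = {k: v for k, v in candidate_kwargs.items() if k in accepted}
    return fn(*args, **kwargs)

def load(path, weights_only=True):  # stand-in for a loader whose API changed
    return path, weights_only

print(call_with_supported_kwargs(load, "ckpt.pt", weights_only=False))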

View File

@@ -9,6 +9,7 @@ import numpy as np
try:
import pandas as pd
from pandas import DataFrame, Series, to_datetime
from pandas.api.types import is_datetime64_any_dtype
from scipy.sparse import issparse
from sklearn.compose import ColumnTransformer
from sklearn.impute import SimpleImputer
@@ -392,6 +393,15 @@ class DataTransformerTS:
assert len(self.num_columns) == 0, "Trying to call fit() twice, something is wrong"
for column in X.columns:
# Never treat the time column as a feature for sklearn preprocessing
if column == self.time_col:
continue
# Robust datetime detection (covers datetime64[ms/us/ns], tz-aware, etc.)
if is_datetime64_any_dtype(X[column]):
self.datetime_columns.append(column)
continue
# sklearn/utils/validation.py needs int/float values
if X[column].dtype.name in ("object", "category", "string"):
if (

View File

@@ -1,10 +1,14 @@
import atexit
import functools
import json
import logging
import os
import pickle
import random
import sys
import tempfile
import time
import warnings
from concurrent.futures import ThreadPoolExecutor, wait
from typing import MutableMapping
import mlflow
@@ -12,14 +16,15 @@ import pandas as pd
from mlflow.entities import Metric, Param, RunTag
from mlflow.exceptions import MlflowException
from mlflow.utils.autologging_utils import AUTOLOGGING_INTEGRATIONS, autologging_is_disabled
from packaging.requirements import Requirement
from scipy.sparse import issparse
from sklearn import tree
try:
from pyspark.ml import Pipeline as SparkPipeline
from pyspark.ml import PipelineModel as SparkPipelineModel
except ImportError:
class SparkPipeline:
class SparkPipelineModel:
pass
@@ -32,6 +37,84 @@ from flaml.version import __version__
SEARCH_MAX_RESULTS = 5000 # Each train should not have more than 5000 trials
IS_RENAME_CHILD_RUN = os.environ.get("FLAML_IS_RENAME_CHILD_RUN", "false").lower() == "true"
REMOVE_REQUIREMENT_LIST = [
"synapseml-cognitive",
"synapseml-core",
"synapseml-deep-learning",
"synapseml-internal",
"synapseml-mlflow",
"synapseml-opencv",
"synapseml-vw",
"synapseml-lightgbm",
"synapseml-utils",
"nni",
"optuna",
]
OPTIONAL_REMOVE_REQUIREMENT_LIST = ["pytorch-lightning", "transformers"]
os.environ["MLFLOW_ENABLE_ARTIFACTS_PROGRESS_BAR"] = os.environ.get("MLFLOW_ENABLE_ARTIFACTS_PROGRESS_BAR", "false")
MLFLOW_NUM_WORKERS = int(os.environ.get("FLAML_MLFLOW_NUM_WORKERS", os.cpu_count() * 4 if os.cpu_count() else 2))
executor = ThreadPoolExecutor(max_workers=MLFLOW_NUM_WORKERS)
atexit.register(lambda: executor.shutdown(wait=True))
IS_CLEAN_LOGS = os.environ.get("FLAML_IS_CLEAN_LOGS", "1")
if IS_CLEAN_LOGS == "1":
logging.getLogger("synapse.ml").setLevel(logging.CRITICAL)
logging.getLogger("mlflow.utils").setLevel(logging.CRITICAL)
logging.getLogger("mlflow.utils.environment").setLevel(logging.CRITICAL)
logging.getLogger("mlflow.models.model").setLevel(logging.CRITICAL)
warnings.simplefilter("ignore", category=FutureWarning)
warnings.simplefilter("ignore", category=UserWarning)
def convert_requirement(requirement_list: list[str]):
ret = (
[Requirement(s.strip().lower()) for s in requirement_list]
if mlflow.__version__ <= "2.17.0"
else requirement_list
)
return ret
def time_it(func_or_code=None):
"""
Decorator or function that measures execution time.
Can be used in three ways:
1. As a decorator with no arguments: @time_it
2. As a decorator with arguments: @time_it()
3. As a function call with a string of code to execute and time: time_it("some_code()")
Args:
func_or_code (callable or str, optional): Either a function to decorate or
a string of code to execute and time.
Returns:
callable or None: Returns a decorated function if used as a decorator,
or None if used to execute a string of code.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
start_time = time.time()
result = func(*args, **kwargs)
end_time = time.time()
logger.debug(f"Execution of {func.__name__} took {end_time - start_time:.4f} seconds")
return result
return wrapper
if callable(func_or_code):
return decorator(func_or_code)
elif func_or_code is None:
return decorator
else:
start_time = time.time()
exec(func_or_code)
end_time = time.time()
logger.debug(f"Execution\n```\n{func_or_code}\n```\ntook {end_time - start_time:.4f} seconds")
def flatten_dict(d: MutableMapping, sep: str = ".") -> MutableMapping:
@@ -49,23 +132,28 @@ def is_autolog_enabled():
return not all(autologging_is_disabled(k) for k in AUTOLOGGING_INTEGRATIONS.keys())
def get_mlflow_log_latency(model_history=False):
def get_mlflow_log_latency(model_history=False, delete_run=True):
try:
FLAML_MLFLOW_LOG_LATENCY = float(os.getenv("FLAML_MLFLOW_LOG_LATENCY", 0))
except ValueError:
FLAML_MLFLOW_LOG_LATENCY = 0
if FLAML_MLFLOW_LOG_LATENCY >= 0.1:
return FLAML_MLFLOW_LOG_LATENCY
st = time.time()
with mlflow.start_run(nested=True, run_name="get_mlflow_log_latency") as run:
if model_history:
sk_model = tree.DecisionTreeClassifier()
mlflow.sklearn.log_model(sk_model, "sk_models")
mlflow.sklearn.log_model(Pipeline([("estimator", sk_model)]), "sk_pipeline")
mlflow.sklearn.log_model(sk_model, "model")
with tempfile.TemporaryDirectory() as tmpdir:
pickle_fpath = os.path.join(tmpdir, f"tmp_{int(time.time()*1000)}")
pickle_fpath = os.path.join(tmpdir, f"tmp_{int(time.time() * 1000)}")
with open(pickle_fpath, "wb") as f:
pickle.dump(sk_model, f)
mlflow.log_artifact(pickle_fpath, "sk_model1")
mlflow.log_artifact(pickle_fpath, "sk_model2")
mlflow.log_artifact(pickle_fpath, "sk_model")
mlflow.set_tag("synapseml.ui.visible", "false") # not shown inline in fabric
mlflow.delete_run(run.info.run_id)
if delete_run:
mlflow.delete_run(run.info.run_id)
et = time.time()
return et - st
return 3 * (et - st)
def infer_signature(X_train=None, y_train=None, dataframe=None, label=None):
@@ -98,12 +186,76 @@ def infer_signature(X_train=None, y_train=None, dataframe=None, label=None):
)
def update_and_install_requirements(
run_id=None,
model_name=None,
model_version=None,
remove_list=None,
artifact_path="model",
dst_path=None,
install_with_ipython=False,
):
if not (run_id or (model_name and model_version)):
raise ValueError(
"Please provide `run_id` or both `model_name` and `model_version`. If all three are provided, `run_id` will be used."
)
if install_with_ipython:
from IPython import get_ipython
if not remove_list:
remove_list = [
"synapseml-cognitive",
"synapseml-core",
"synapseml-deep-learning",
"synapseml-internal",
"synapseml-mlflow",
"synapseml-opencv",
"synapseml-vw",
"synapseml-lightgbm",
"synapseml-utils",
"flaml", # flaml is needed for AutoML models, should be pre-installed in the runtime
"pyspark", # fabric internal pyspark should be pre-installed in the runtime
]
# Download model artifacts
client = mlflow.MlflowClient()
if not run_id:
run_id = client.get_model_version(model_name, model_version).run_id
if not dst_path:
dst_path = os.path.join(tempfile.gettempdir(), "model_artifacts")
os.makedirs(dst_path, exist_ok=True)
client.download_artifacts(run_id, artifact_path, dst_path)
requirements_path = os.path.join(dst_path, artifact_path, "requirements.txt")
with open(requirements_path) as f:
reqs = f.read().splitlines()
old_reqs = [Requirement(req) for req in reqs if req]
old_reqs_dict = {req.name: str(req) for req in old_reqs}
for req in remove_list:
req = Requirement(req)
if req.name in old_reqs_dict:
old_reqs_dict.pop(req.name, None)
new_reqs_list = list(old_reqs_dict.values())
with open(requirements_path, "w") as f:
f.write("\n".join(new_reqs_list))
if install_with_ipython:
get_ipython().run_line_magic("pip", f"install -r {requirements_path} -q")
else:
logger.info(f"You can run `pip install -r {requirements_path}` to install dependencies.")
return requirements_path
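A hedged call sketch for the helper above; the run id is a placeholder, and install_with_ipython is left off so the function only rewrites requirements.txt:

from flaml.fabric.mlflow import update_and_install_requirements  # assumed import path

req_path = update_and_install_requirements(
    run_id="<mlflow-run-id>",  # placeholder
    artifact_path="model",
    install_with_ipython=False,
)
print(f"Review and install with: pip install -r {req_path}")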
def _mlflow_wrapper(evaluation_func, mlflow_exp_id, mlflow_config=None, extra_tags=None, autolog=False):
def wrapped(*args, **kwargs):
if mlflow_config is not None:
from synapse.ml.mlflow import set_mlflow_env_config
try:
from synapse.ml.mlflow import set_mlflow_env_config
set_mlflow_env_config(mlflow_config)
set_mlflow_env_config(mlflow_config)
except Exception:
pass
import mlflow
if mlflow_exp_id is not None:
@@ -124,7 +276,20 @@ def _mlflow_wrapper(evaluation_func, mlflow_exp_id, mlflow_config=None, extra_ta
def _get_notebook_name():
return None
try:
import re
from synapse.ml.mlflow import get_mlflow_env_config
from synapse.ml.mlflow.shared_platform_utils import get_artifact
notebook_id = get_mlflow_env_config(False).artifact_id
current_notebook = get_artifact(notebook_id)
notebook_name = re.sub("\\W+", "-", current_notebook.displayName).strip()
return notebook_name
except Exception as e:
logger.debug(f"Failed to get notebook name: {e}")
return None
def safe_json_dumps(obj):
@@ -163,6 +328,8 @@ class MLflowIntegration:
self.has_model = False
self.only_history = False
self._do_log_model = True
self.futures = {}
self.futures_log_model = {}
self.extra_tag = (
extra_tag
@@ -170,6 +337,9 @@ class MLflowIntegration:
else {"extra_tag.sid": f"flaml_{__version__}_{int(time.time())}_{random.randint(1001, 9999)}"}
)
self.start_time = time.time()
self.experiment_type = experiment_type
self.update_autolog_state()
self.mlflow_client = mlflow.tracking.MlflowClient()
parent_run_info = mlflow.active_run().info if mlflow.active_run() is not None else None
if parent_run_info:
@@ -188,8 +358,6 @@ class MLflowIntegration:
mlflow.set_experiment(experiment_name=mlflow_exp_name)
self.experiment_id = mlflow.tracking.fluent._active_experiment_id
self.experiment_name = mlflow.get_experiment(self.experiment_id).name
self.experiment_type = experiment_type
self.update_autolog_state()
if self.autolog:
# only end user created parent run in autolog scenario
@@ -197,9 +365,12 @@ class MLflowIntegration:
def set_mlflow_config(self):
if self.driver_mlflow_env_config is not None:
from synapse.ml.mlflow import set_mlflow_env_config
try:
from synapse.ml.mlflow import set_mlflow_env_config
set_mlflow_env_config(self.driver_mlflow_env_config)
set_mlflow_env_config(self.driver_mlflow_env_config)
except Exception:
pass
def wrap_evaluation_function(self, evaluation_function):
wrapped_evaluation_function = _mlflow_wrapper(
@@ -267,6 +438,7 @@ class MLflowIntegration:
else:
_tags = []
self.mlflow_client.log_batch(run_id=target_id, metrics=_metrics, params=[], tags=_tags)
return f"Successfully copy_mlflow_run run_id {src_id} to run_id {target_id}"
def record_trial(self, result, trial, metric):
if isinstance(result, dict):
@@ -334,12 +506,31 @@ class MLflowIntegration:
self.copy_mlflow_run(best_mlflow_run_id, self.parent_run_id)
self.has_summary = True
def log_model(self, model, estimator, signature=None):
def log_model(self, model, estimator, signature=None, run_id=None):
if not self._do_log_model:
return
logger.debug(f"logging model {estimator}")
ret_message = f"Successfully log_model {estimator} to run_id {run_id}"
optional_remove_list = (
[] if estimator in ["transformer", "transformer_ms", "tcn", "tft"] else OPTIONAL_REMOVE_REQUIREMENT_LIST
)
run = mlflow.active_run()
if run and run.info.run_id == self.parent_run_id:
logger.debug(
f"Current active run_id {run.info.run_id} == parent_run_id {self.parent_run_id}, Starting run_id {run_id}"
)
mlflow.start_run(run_id=run_id, nested=True)
elif run and run.info.run_id != run_id:
ret_message = (
f"Error: Should log_model {estimator} to run_id {run_id}, but logged to run_id {run.info.run_id}"
)
logger.error(ret_message)
else:
logger.debug(f"No active run, start run_id {run_id}")
mlflow.start_run(run_id=run_id)
logger.debug(f"logged model {estimator} to run_id {mlflow.active_run().info.run_id}")
if estimator.endswith("_spark"):
mlflow.spark.log_model(model, estimator, signature=signature)
# mlflow.spark.log_model(model, estimator, signature=signature)
mlflow.spark.log_model(model, "model", signature=signature)
elif estimator in ["lgbm"]:
mlflow.lightgbm.log_model(model, estimator, signature=signature)
@@ -352,42 +543,93 @@ class MLflowIntegration:
elif estimator in ["prophet"]:
mlflow.prophet.log_model(model, estimator, signature=signature)
elif estimator in ["orbit"]:
pass
logger.warning(f"Unsupported model: {estimator}. No model logged.")
else:
mlflow.sklearn.log_model(model, estimator, signature=signature)
future = executor.submit(
lambda: mlflow.models.model.update_model_requirements(
model_uri=f"runs:/{run_id}/{'model' if estimator.endswith('_spark') else estimator}",
operation="remove",
requirement_list=convert_requirement(REMOVE_REQUIREMENT_LIST + optional_remove_list),
)
)
self.futures[future] = f"run_{run_id}_requirements_updated"
if not run or run.info.run_id == self.parent_run_id:
logger.debug(f"Ending current run_id {mlflow.active_run().info.run_id}")
mlflow.end_run()
return ret_message
def _pickle_and_log_artifact(self, obj, artifact_name, pickle_fname="temp_.pkl"):
def _pickle_and_log_artifact(self, obj, artifact_name, pickle_fname="temp_.pkl", run_id=None):
if not self._do_log_model:
return
return True
with tempfile.TemporaryDirectory() as tmpdir:
pickle_fpath = os.path.join(tmpdir, pickle_fname)
try:
with open(pickle_fpath, "wb") as f:
pickle.dump(obj, f)
mlflow.log_artifact(pickle_fpath, artifact_name)
mlflow.log_artifact(pickle_fpath, artifact_name, run_id)
return True
except Exception as e:
logger.debug(f"Failed to pickle and log artifact {artifact_name}, error: {e}")
logger.debug(f"Failed to pickle and log {artifact_name}, error: {e}")
return False
def pickle_and_log_automl_artifacts(self, automl, model, estimator, signature=None):
def _log_pipeline(self, pipeline, flavor_name, pipeline_name, signature, run_id, estimator=None):
logger.debug(f"logging pipeline {flavor_name}:{pipeline_name}:{estimator}")
ret_message = f"Successfully _log_pipeline {flavor_name}:{pipeline_name}:{estimator} to run_id {run_id}"
optional_remove_list = (
[] if estimator in ["transformer", "transformer_ms", "tcn", "tft"] else OPTIONAL_REMOVE_REQUIREMENT_LIST
)
run = mlflow.active_run()
if run and run.info.run_id == self.parent_run_id:
logger.debug(
f"Current active run_id {run.info.run_id} == parent_run_id {self.parent_run_id}, Starting run_id {run_id}"
)
mlflow.start_run(run_id=run_id, nested=True)
elif run and run.info.run_id != run_id:
ret_message = f"Error: Should _log_pipeline {flavor_name}:{pipeline_name}:{estimator} model to run_id {run_id}, but logged to run_id {run.info.run_id}"
logger.error(ret_message)
else:
logger.debug(f"No active run, start run_id {run_id}")
mlflow.start_run(run_id=run_id)
logger.debug(
f"logging pipeline {flavor_name}:{pipeline_name}:{estimator} to run_id {mlflow.active_run().info.run_id}"
)
if flavor_name == "sklearn":
mlflow.sklearn.log_model(pipeline, pipeline_name, signature=signature)
elif flavor_name == "spark":
mlflow.spark.log_model(pipeline, pipeline_name, signature=signature)
else:
logger.warning(f"Unsupported pipeline flavor: {flavor_name}. No model logged.")
future = executor.submit(
lambda: mlflow.models.model.update_model_requirements(
model_uri=f"runs:/{run_id}/{pipeline_name}",
operation="remove",
requirement_list=convert_requirement(REMOVE_REQUIREMENT_LIST + optional_remove_list),
)
)
self.futures[future] = f"run_{run_id}_requirements_updated"
if not run or run.info.run_id == self.parent_run_id:
logger.debug(f"Ending current run_id {mlflow.active_run().info.run_id}")
mlflow.end_run()
return ret_message
def pickle_and_log_automl_artifacts(self, automl, model, estimator, signature=None, run_id=None):
"""log automl artifacts to mlflow
load back with `automl = mlflow.pyfunc.load_model(model_run_id_or_uri)`, then do prediction with `automl.predict(X)`
"""
logger.debug(f"logging automl artifacts {estimator}")
self._pickle_and_log_artifact(automl.feature_transformer, "feature_transformer", "feature_transformer.pkl")
self._pickle_and_log_artifact(automl.label_transformer, "label_transformer", "label_transformer.pkl")
# Test test_mlflow 1 and 4 will get error: TypeError: cannot pickle '_io.TextIOWrapper' object
# try:
# self._pickle_and_log_artifact(automl, "automl", "automl.pkl")
# except TypeError:
# pass
logger.debug(f"logging automl estimator {estimator}")
# self._pickle_and_log_artifact(
# automl.feature_transformer, "feature_transformer", "feature_transformer.pkl", run_id
# )
# self._pickle_and_log_artifact(automl.label_transformer, "label_transformer", "label_transformer.pkl", run_id)
if estimator.endswith("_spark"):
# spark pipeline is not supported yet
return
feature_transformer = automl.feature_transformer
if isinstance(feature_transformer, Pipeline):
if isinstance(feature_transformer, Pipeline) and not estimator.endswith("_spark"):
pipeline = feature_transformer
pipeline.steps.append(("estimator", model))
elif isinstance(feature_transformer, SparkPipeline):
elif isinstance(feature_transformer, SparkPipelineModel) and estimator.endswith("_spark"):
pipeline = feature_transformer
pipeline.stages.append(model)
elif not estimator.endswith("_spark"):
@@ -395,24 +637,26 @@ class MLflowIntegration:
steps.append(("estimator", model))
pipeline = Pipeline(steps)
else:
stages = [feature_transformer]
stages = []
if feature_transformer is not None:
stages.append(feature_transformer)
stages.append(model)
pipeline = SparkPipeline(stages=stages)
if isinstance(pipeline, SparkPipeline):
pipeline = SparkPipelineModel(stages=stages)
if isinstance(pipeline, SparkPipelineModel):
logger.debug(f"logging spark pipeline {estimator}")
mlflow.spark.log_model(pipeline, "automl_pipeline", signature=signature)
self._log_pipeline(pipeline, "spark", "model", signature, run_id, estimator)
else:
# Add a log named "model" to fit default settings
logger.debug(f"logging sklearn pipeline {estimator}")
mlflow.sklearn.log_model(pipeline, "automl_pipeline", signature=signature)
mlflow.sklearn.log_model(pipeline, "model", signature=signature)
self._log_pipeline(pipeline, "sklearn", "model", signature, run_id, estimator)
return f"Successfully pickle_and_log_automl_artifacts {estimator} to run_id {run_id}"
@time_it
def record_state(self, automl, search_state, estimator):
_st = time.time()
automl_metric_name = (
automl._state.metric if isinstance(automl._state.metric, str) else automl._state.error_metric
)
if automl._state.error_metric.startswith("1-"):
automl_metric_value = 1 - search_state.val_loss
elif automl._state.error_metric.startswith("-"):
@@ -425,6 +669,8 @@ class MLflowIntegration:
else:
config = search_state.config
self.automl_user_configurations = safe_json_dumps(automl._automl_user_configurations)
info = {
"metrics": {
"iter_counter": automl._track_iter,
@@ -445,7 +691,7 @@ class MLflowIntegration:
"flaml.meric": automl_metric_name,
"flaml.run_source": "flaml-automl",
"flaml.log_type": self.log_type,
"flaml.automl_user_configurations": safe_json_dumps(automl._automl_user_configurations),
"flaml.automl_user_configurations": self.automl_user_configurations,
},
"params": {
"sample_size": search_state.sample_size,
@@ -472,33 +718,70 @@ class MLflowIntegration:
run_name = f"{self.parent_run_name}_child_{self.child_counter}"
else:
run_name = None
_t1 = time.time()
wait(self.futures_log_model)
_t2 = time.time() - _t1
logger.debug(f"wait futures_log_model in record_state took {_t2} seconds")
with mlflow.start_run(nested=True, run_name=run_name) as child_run:
self._log_info_to_run(info, child_run.info.run_id, log_params=True)
future = executor.submit(lambda: self._log_info_to_run(info, child_run.info.run_id, log_params=True))
self.futures[future] = f"iter_{automl._track_iter}_log_info_to_run"
future = executor.submit(lambda: self._log_automl_configurations(child_run.info.run_id))
self.futures[future] = f"iter_{automl._track_iter}_log_automl_configurations"
if automl._state.model_history:
self.log_model(
search_state.trained_estimator._model, estimator, signature=automl.estimator_signature
)
self.pickle_and_log_automl_artifacts(
automl, search_state.trained_estimator, estimator, signature=automl.pipeline_signature
)
if estimator.endswith("_spark"):
future = executor.submit(
lambda: self.log_model(
search_state.trained_estimator._model,
estimator,
automl.estimator_signature,
child_run.info.run_id,
)
)
self.futures_log_model[future] = f"record_state-log_model_{estimator}"
else:
future = executor.submit(
lambda: self.pickle_and_log_automl_artifacts(
automl,
search_state.trained_estimator,
estimator,
automl.pipeline_signature,
child_run.info.run_id,
)
)
self.futures_log_model[future] = f"record_state-pickle_and_log_automl_artifacts_{estimator}"
self.manual_run_ids.append(child_run.info.run_id)
self.child_counter += 1
return f"Successfully record_state iteration {automl._track_iter}"
@time_it
def log_automl(self, automl):
self.set_best_iter(automl)
if self.autolog:
if self.parent_run_id is not None:
mlflow.start_run(run_id=self.parent_run_id, experiment_id=self.experiment_id)
mlflow.log_metric("best_validation_loss", automl._state.best_loss)
mlflow.log_metric("best_iteration", automl._best_iteration)
mlflow.log_metric("num_child_runs", len(self.infos))
if automl._trained_estimator is not None and not self.has_model:
self.log_model(
automl._trained_estimator._model, automl.best_estimator, signature=automl.estimator_signature
)
self.pickle_and_log_automl_artifacts(
automl, automl.model, automl.best_estimator, signature=automl.pipeline_signature
)
mlflow.log_metrics(
{
"best_validation_loss": automl._state.best_loss,
"best_iteration": automl._best_iteration,
"num_child_runs": len(self.infos),
}
)
if (
automl._trained_estimator is not None
and not self.has_model
and automl._trained_estimator._model is not None
):
if automl.best_estimator.endswith("_spark"):
self.log_model(
automl._trained_estimator._model,
automl.best_estimator,
automl.estimator_signature,
self.parent_run_id,
)
else:
self.pickle_and_log_automl_artifacts(
automl, automl.model, automl.best_estimator, automl.pipeline_signature, self.parent_run_id
)
self.has_model = True
self.adopt_children(automl)
@@ -515,30 +798,65 @@ class MLflowIntegration:
if "ml" in conf.keys():
conf = conf["ml"]
mlflow.log_params(conf)
mlflow.log_param("best_learner", automl._best_estimator)
mlflow.log_params({**conf, "best_learner": automl._best_estimator}, run_id=self.parent_run_id)
if not self.has_summary:
logger.info(f"logging best model {automl.best_estimator}")
self.copy_mlflow_run(best_mlflow_run_id, self.parent_run_id)
future = executor.submit(lambda: self.copy_mlflow_run(best_mlflow_run_id, self.parent_run_id))
self.futures[future] = "log_automl_copy_mlflow_run"
future = executor.submit(lambda: self._log_automl_configurations(self.parent_run_id))
self.futures[future] = "log_automl_log_automl_configurations"
self.has_summary = True
if automl._trained_estimator is not None and not self.has_model:
self.log_model(
automl._trained_estimator._model,
automl.best_estimator,
signature=automl.estimator_signature,
)
self.pickle_and_log_automl_artifacts(
automl, automl.model, automl.best_estimator, signature=automl.pipeline_signature
)
_t1 = time.time()
wait(self.futures_log_model)
_t2 = time.time() - _t1
logger.debug(f"wait futures_log_model in log_automl took {_t2} seconds")
if (
automl._trained_estimator is not None
and not self.has_model
and automl._trained_estimator._model is not None
):
if automl.best_estimator.endswith("_spark"):
future = executor.submit(
lambda: self.log_model(
automl._trained_estimator._model,
automl.best_estimator,
signature=automl.estimator_signature,
run_id=self.parent_run_id,
)
)
self.futures_log_model[future] = f"log_automl-log_model_{automl.best_estimator}"
else:
future = executor.submit(
lambda: self.pickle_and_log_automl_artifacts(
automl,
automl.model,
automl.best_estimator,
signature=automl.pipeline_signature,
run_id=self.parent_run_id,
)
)
self.futures_log_model[
future
] = f"log_automl-pickle_and_log_automl_artifacts_{automl.best_estimator}"
self.has_model = True
def resume_mlflow(self):
if len(self.resume_params) > 0:
mlflow.autolog(**self.resume_params)
def _log_automl_configurations(self, run_id):
self.mlflow_client.log_text(
run_id=run_id,
text=self.automl_user_configurations,
artifact_file="automl_configurations/automl_user_configurations.json",
)
return f"Successfully _log_automl_configurations to run_id {run_id}"
def _log_info_to_run(self, info, run_id, log_params=False):
_metrics = [Metric(key, value, int(time.time() * 1000), 0) for key, value in info["metrics"].items()]
_tags = [RunTag(key, str(value)) for key, value in info["tags"].items()]
_tags = [
RunTag(key, str(value)[:5000]) for key, value in info["tags"].items()
] # AML will raise error if value length > 5000
_params = [
Param(key, str(value))
for key, value in info["params"].items()
@@ -554,6 +872,7 @@ class MLflowIntegration:
_tags = [RunTag("mlflow.parentRunId", run_id)]
self.mlflow_client.log_batch(run_id=run.info.run_id, metrics=_metrics, params=[], tags=_tags)
del info["submetrics"]["values"]
return f"Successfully _log_info_to_run to run_id {run_id}"
def adopt_children(self, result=None):
"""

flaml/tune/logger.py (new file, 37 lines)
View File

@@ -0,0 +1,37 @@
import logging
import os
class ColoredFormatter(logging.Formatter):
# ANSI escape codes for colors
COLORS = {
# logging.DEBUG: "\033[36m", # Cyan
# logging.INFO: "\033[32m", # Green
logging.WARNING: "\033[33m", # Yellow
logging.ERROR: "\033[31m", # Red
logging.CRITICAL: "\033[1;31m", # Bright Red
}
RESET = "\033[0m" # Reset to default
def __init__(self, fmt, datefmt, use_color=True):
super().__init__(fmt, datefmt)
self.use_color = use_color
def format(self, record):
formatted = super().format(record)
if self.use_color:
color = self.COLORS.get(record.levelno, "")
if color:
return f"{color}{formatted}{self.RESET}"
return formatted
logger = logging.getLogger(__name__)
use_color = True
if os.getenv("FLAML_LOG_NO_COLOR"):
use_color = False
logger_formatter = ColoredFormatter(
"[%(name)s: %(asctime)s] {%(lineno)d} %(levelname)s - %(message)s", "%m-%d %H:%M:%S", use_color
)
logger.propagate = False

View File

@@ -244,13 +244,32 @@ class BlendSearch(Searcher):
evaluated_rewards=evaluated_rewards,
)
except (AssertionError, ValueError):
self._gs = GlobalSearch(
space=gs_space,
metric=metric,
mode=mode,
seed=gs_seed,
sampler=sampler,
)
try:
self._gs = GlobalSearch(
space=gs_space,
metric=metric,
mode=mode,
seed=gs_seed,
sampler=sampler,
)
except ValueError:
# Ray Tune's OptunaSearch converts Tune domains into Optuna
# distributions. Optuna disallows integer log distributions
# with step != 1 (e.g., qlograndint with q>1), which can
# raise here. Fall back to FLAML's OptunaSearch wrapper,
# which handles these spaces more permissively.
if getattr(GlobalSearch, "__module__", "").startswith("ray.tune"):
from .suggestion import OptunaSearch as _FallbackOptunaSearch
self._gs = _FallbackOptunaSearch(
space=gs_space,
metric=metric,
mode=mode,
seed=gs_seed,
sampler=sampler,
)
else:
raise
self._gs.space = space
else:
self._gs = None

View File

@@ -35,6 +35,73 @@ from ..sample import (
Quantized,
Uniform,
)
# If Ray is installed, flaml.tune may re-export Ray Tune sampling functions.
# In that case, the search space contains Ray Tune Domain/Sampler objects,
# which should be accepted by our Optuna search-space conversion.
try:
from ray import __version__ as _ray_version # type: ignore
if str(_ray_version).startswith("1."):
from ray.tune.sample import ( # type: ignore
Categorical as _RayCategorical,
)
from ray.tune.sample import (
Domain as _RayDomain,
)
from ray.tune.sample import (
Float as _RayFloat,
)
from ray.tune.sample import (
Integer as _RayInteger,
)
from ray.tune.sample import (
LogUniform as _RayLogUniform,
)
from ray.tune.sample import (
Quantized as _RayQuantized,
)
from ray.tune.sample import (
Uniform as _RayUniform,
)
else:
from ray.tune.search.sample import ( # type: ignore
Categorical as _RayCategorical,
)
from ray.tune.search.sample import (
Domain as _RayDomain,
)
from ray.tune.search.sample import (
Float as _RayFloat,
)
from ray.tune.search.sample import (
Integer as _RayInteger,
)
from ray.tune.search.sample import (
LogUniform as _RayLogUniform,
)
from ray.tune.search.sample import (
Quantized as _RayQuantized,
)
from ray.tune.search.sample import (
Uniform as _RayUniform,
)
_FLOAT_TYPES = (Float, _RayFloat)
_INTEGER_TYPES = (Integer, _RayInteger)
_CATEGORICAL_TYPES = (Categorical, _RayCategorical)
_DOMAIN_TYPES = (Domain, _RayDomain)
_QUANTIZED_TYPES = (Quantized, _RayQuantized)
_UNIFORM_TYPES = (Uniform, _RayUniform)
_LOGUNIFORM_TYPES = (LogUniform, _RayLogUniform)
except Exception: # pragma: no cover
_FLOAT_TYPES = (Float,)
_INTEGER_TYPES = (Integer,)
_CATEGORICAL_TYPES = (Categorical,)
_DOMAIN_TYPES = (Domain,)
_QUANTIZED_TYPES = (Quantized,)
_UNIFORM_TYPES = (Uniform,)
_LOGUNIFORM_TYPES = (LogUniform,)
from ..trial import flatten_dict, unflatten_dict
from .variant_generator import parse_spec_vars
@@ -850,19 +917,22 @@ class OptunaSearch(Searcher):
def resolve_value(domain: Domain) -> ot.distributions.BaseDistribution:
quantize = None
sampler = domain.get_sampler()
if isinstance(sampler, Quantized):
# Ray Tune Domains and FLAML Domains both provide get_sampler(), but
# fall back to the .sampler attribute for robustness.
sampler = domain.get_sampler() if hasattr(domain, "get_sampler") else getattr(domain, "sampler", None)
if isinstance(sampler, _QUANTIZED_TYPES) or type(sampler).__name__ == "Quantized":
quantize = sampler.q
sampler = sampler.sampler
if isinstance(sampler, LogUniform):
sampler = getattr(sampler, "sampler", None) or sampler.get_sampler()
if isinstance(sampler, _LOGUNIFORM_TYPES) or type(sampler).__name__ == "LogUniform":
logger.warning(
"Optuna does not handle quantization in loguniform "
"sampling. The parameter will be passed but it will "
"probably be ignored."
)
if isinstance(domain, Float):
if isinstance(sampler, LogUniform):
if isinstance(domain, _FLOAT_TYPES) or type(domain).__name__ == "Float":
if isinstance(sampler, _LOGUNIFORM_TYPES) or type(sampler).__name__ == "LogUniform":
if quantize:
logger.warning(
"Optuna does not support both quantization and "
@@ -870,17 +940,17 @@ class OptunaSearch(Searcher):
)
return ot.distributions.LogUniformDistribution(domain.lower, domain.upper)
elif isinstance(sampler, Uniform):
elif isinstance(sampler, _UNIFORM_TYPES) or type(sampler).__name__ == "Uniform":
if quantize:
return ot.distributions.DiscreteUniformDistribution(domain.lower, domain.upper, quantize)
return ot.distributions.UniformDistribution(domain.lower, domain.upper)
elif isinstance(domain, Integer):
if isinstance(sampler, LogUniform):
elif isinstance(domain, _INTEGER_TYPES) or type(domain).__name__ == "Integer":
if isinstance(sampler, _LOGUNIFORM_TYPES) or type(sampler).__name__ == "LogUniform":
# ``step`` argument Deprecated in v2.0.0. ``step`` argument should be 1 in Log Distribution
# The removal of this feature is currently scheduled for v4.0.0,
return ot.distributions.IntLogUniformDistribution(domain.lower, domain.upper - 1, step=1)
elif isinstance(sampler, Uniform):
elif isinstance(sampler, _UNIFORM_TYPES) or type(sampler).__name__ == "Uniform":
# Upper bound should be inclusive for quantization and
# exclusive otherwise
return ot.distributions.IntUniformDistribution(
@@ -888,16 +958,16 @@ class OptunaSearch(Searcher):
domain.upper - int(bool(not quantize)),
step=quantize or 1,
)
elif isinstance(domain, Categorical):
if isinstance(sampler, Uniform):
elif isinstance(domain, _CATEGORICAL_TYPES) or type(domain).__name__ == "Categorical":
if isinstance(sampler, _UNIFORM_TYPES) or type(sampler).__name__ == "Uniform":
return ot.distributions.CategoricalDistribution(domain.categories)
raise ValueError(
"Optuna search does not support parameters of type "
"`{}` with samplers of type `{}`".format(type(domain).__name__, type(domain.sampler).__name__)
"`{}` with samplers of type `{}`".format(type(domain).__name__, type(sampler).__name__)
)
# Parameter name is e.g. "a/b/c" for nested dicts
values = {"/".join(path): resolve_value(domain) for path, domain in domain_vars}
return values
return values

View File

@@ -162,6 +162,10 @@ def broadcast_code(custom_code="", file_name="mylearner"):
assert isinstance(MyLargeLGBM(), LGBMEstimator)
```
"""
# Check if Spark is available
spark_available, _ = check_spark()
# Write to local driver file system
flaml_path = os.path.dirname(os.path.abspath(__file__))
custom_code = textwrap.dedent(custom_code)
custom_path = os.path.join(flaml_path, file_name + ".py")
@@ -169,6 +173,24 @@ def broadcast_code(custom_code="", file_name="mylearner"):
with open(custom_path, "w") as f:
f.write(custom_code)
# If using Spark, broadcast the code content to executors
if spark_available:
spark = SparkSession.builder.getOrCreate()
bc_code = spark.sparkContext.broadcast(custom_code)
# Execute a job to ensure the code is distributed to all executors
def _write_code(bc):
code = bc.value
import os
module_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), file_name + ".py")
os.makedirs(os.path.dirname(module_path), exist_ok=True)
with open(module_path, "w") as f:
f.write(code)
return True
spark.sparkContext.parallelize(range(1)).map(lambda _: _write_code(bc_code)).collect()
return custom_path

View File

@@ -21,11 +21,11 @@ except (ImportError, AssertionError):
from .analysis import ExperimentAnalysis as EA
else:
ray_available = True
import logging
from flaml.tune.spark.utils import PySparkOvertimeMonitor, check_spark
from .logger import logger, logger_formatter
from .result import DEFAULT_METRIC
from .trial import Trial
@@ -41,8 +41,6 @@ except ImportError:
internal_mlflow = False
logger = logging.getLogger(__name__)
logger.propagate = False
_use_ray = True
_runner = None
_verbose = 0
@@ -197,9 +195,16 @@ def report(_metric=None, **kwargs):
global _training_iteration
if _use_ray:
try:
from ray import tune
from ray import __version__ as ray_version
return tune.report(_metric, **kwargs)
if ray_version.startswith("1."):
from ray import tune
return tune.report(_metric, **kwargs)
else: # ray>=2
from ray.air import session
return session.report(metrics={"metric": _metric, **kwargs})
except ImportError:
# calling tune.report() outside tune.run()
return
@@ -260,6 +265,8 @@ def run(
mlflow_exp_name: Optional[str] = None,
automl_info: Optional[Tuple[float]] = None,
extra_tag: Optional[dict] = None,
cost_attr: Optional[str] = "auto",
cost_budget: Optional[float] = None,
**ray_args,
):
"""The function-based way of performing HPO.
@@ -462,6 +469,12 @@ def run(
overwritten by the value of `n_concurrent_trials` in AutoML. When <= 0, the concurrent trials
will be set to the number of executors.
extra_tag: dict, default=None | Extra tags to be added to the mlflow runs created by autologging.
cost_attr: None or str, default="auto" | The attribute used to evaluate the cost of each trial.
"auto" chooses the cost attribute automatically, depending on the nature of the resource budget.
When set to None, cost differences between trials are ignored by the search algorithm.
When set to a str other than "auto" and "time_total_s", that attribute must be present in each trial's result dict.
cost_budget: float, optional | The cost budget. Only valid when cost_attr is a str other than "auto" and "time_total_s".
**ray_args: keyword arguments to pass to ray.tune.run().
Only valid when use_ray=True.
"""
@@ -506,10 +519,6 @@ def run(
elif not logger.hasHandlers():
# Add the console handler.
_ch = logging.StreamHandler(stream=sys.stdout)
logger_formatter = logging.Formatter(
"[%(name)s: %(asctime)s] {%(lineno)d} %(levelname)s - %(message)s",
"%m-%d %H:%M:%S",
)
_ch.setFormatter(logger_formatter)
logger.addHandler(_ch)
if verbose <= 2:
@@ -600,6 +609,8 @@ def run(
metric_constraints=metric_constraints,
use_incumbent_result_in_evaluation=use_incumbent_result_in_evaluation,
lexico_objectives=lexico_objectives,
cost_attr=cost_attr,
cost_budget=cost_budget,
)
else:
if metric is None or mode is None:
@@ -735,10 +746,16 @@ def run(
    max_concurrent = max(1, search_alg.max_concurrent)
else:
    max_concurrent = max(1, max_spark_parallelism)
passed_in_n_concurrent_trials = max(n_concurrent_trials, max_concurrent)
n_concurrent_trials = min(
    n_concurrent_trials if n_concurrent_trials > 0 else num_executors,
    max_concurrent,
)
if n_concurrent_trials < passed_in_n_concurrent_trials:
    logger.warning(
        f"The actual concurrent trials is {n_concurrent_trials}. You can set the environment "
        f"variable `FLAML_MAX_CONCURRENT` to '{passed_in_n_concurrent_trials}' to override the detected num of executors."
    )
with parallel_backend("spark"):
    with Parallel(n_jobs=n_concurrent_trials, verbose=max(0, (verbose - 1) * 50)) as parallel:
        try:
@@ -760,7 +777,7 @@ def run(
and num_failures < upperbound_num_failures
):
if automl_info and automl_info[0] > 0 and time_budget_s < np.inf:
time_budget_s -= automl_info[0]
time_budget_s -= automl_info[0] * n_concurrent_trials
logger.debug(f"Remaining time budget with mlflow log latency: {time_budget_s} seconds.")
while len(_runner.running_trials) < n_concurrent_trials:
# suggest trials for spark
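As the warning above suggests, the detected executor count can be overridden through an environment variable before tuning starts; a one-line sketch, with an example value:

import os

os.environ["FLAML_MAX_CONCURRENT"] = "8"  # example value; consulted when sizing Spark trial concurrency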

View File

@@ -1 +1 @@
__version__ = "2.3.2"
__version__ = "2.4.0"

View File

@@ -0,0 +1,259 @@
absl-py==2.4.0
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.4
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.1.0
async-timeout==5.0.1
attrs==25.4.0
autopage==0.6.0
babel==2.18.0
backports.strenum==1.3.1
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cliff==4.13.1
cloudpickle==3.1.2
cmaes==0.12.0
cmd2==3.2.0
cmdstanpy==1.3.0
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.2
convertdate==2.4.1
coverage==7.13.4
cryptography==46.0.5
cuda-bindings==12.9.4
cuda-pathfinder==1.3.4
cycler==0.12.1
databricks-sdk==0.88.0
dataclasses==0.6
datasets==4.5.0
debugpy==1.8.20
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
exceptiongroup==1.3.1
executing==2.2.1
fastapi==0.129.0
fastjsonschema==2.21.2
filelock==3.24.0
-e git+https://github.com/microsoft/FLAML@cc903f78074f93f2fcf644c046c9e3036ce7fb38#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.48.0
graphviz==0.21
greenlet==3.3.1
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
holidays==0.90
httpcore==1.0.9
httpx==0.28.1
huggingface_hub==1.4.1
identify==2.6.16
idna==3.11
importlib_metadata==8.7.1
importlib_resources==6.5.2
iniconfig==2.3.0
ipykernel==7.2.0
ipython==8.38.0
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.4
jupyterlab==4.5.4
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.1
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
mdurl==0.1.2
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.1
multiprocess==0.70.18
narwhals==2.16.0
nbclient==0.10.4
nbconvert==7.17.0
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.4.2
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.3
notebook_shim==0.2.4
numpy==1.26.4
nvidia-cublas-cu12==12.8.4.1
nvidia-cuda-cupti-cu12==12.8.90
nvidia-cuda-nvrtc-cu12==12.8.93
nvidia-cuda-runtime-cu12==12.8.90
nvidia-cudnn-cu12==9.10.2.21
nvidia-cufft-cu12==11.3.3.83
nvidia-cufile-cu12==1.13.1.3
nvidia-curand-cu12==10.3.9.90
nvidia-cusolver-cu12==11.7.3.90
nvidia-cusparse-cu12==12.5.8.93
nvidia-cusparselt-cu12==0.7.1
nvidia-nccl-cu12==2.27.5
nvidia-nvjitlink-cu12==12.8.93
nvidia-nvshmem-cu12==3.4.5
nvidia-nvtx-cu12==12.8.90
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==2.8.0
overrides==7.7.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.6
patsy==1.0.2
pexpect==4.9.0
pillow==12.1.1
platformdirs==4.9.1
plotly==6.5.2
pluggy==1.6.0
pre_commit==4.5.1
prettytable==3.17.0
prometheus_client==0.24.1
prompt_toolkit==3.0.52
propcache==0.4.1
prophet==1.3.0
protobuf==6.33.5
psutil==7.2.2
ptyprocess==0.7.0
pure_eval==0.2.3
pyarrow==23.0.0
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.2
pyperclip==1.11.0
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
pytz==2025.2
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2026.1.15
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf-python==3.12.0
rich==14.3.2
rich-argparse==1.7.2
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-base==0.13.1
scikit-learn==1.7.2
scipy==1.15.3
Send2Trash==2.1.0
seqeval==1.2.2
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
soupsieve==2.8.3
SQLAlchemy==2.0.46
sqlparse==0.5.5
stack-data==0.6.3
stanio==0.5.1
starlette==0.52.1
statsmodels==0.14.6
stevedore==5.6.0
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
tomli==2.4.0
torch==2.10.0
torchmetrics==1.8.2
torchvision==0.25.0
tornado==6.5.4
tqdm==4.67.3
traitlets==5.14.3
transformers==5.1.0
triton==3.6.0
typer==0.23.1
typer-slim==0.23.1
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.6.0
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==1.7.6
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,237 @@
absl-py==2.4.0
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.4
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.1.0
async-timeout==5.0.1
attrs==25.4.0
autopage==0.6.0
babel==2.18.0
backports.strenum==1.3.1
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cliff==4.13.1
cloudpickle==3.1.2
cmaes==0.12.0
cmd2==3.2.0
colorama==0.4.6
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.2
convertdate==2.4.1
coverage==7.13.4
cryptography==46.0.5
cycler==0.12.1
databricks-sdk==0.88.0
dataclasses==0.6
datasets==4.5.0
debugpy==1.8.20
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
exceptiongroup==1.3.1
executing==2.2.1
fastapi==0.129.0
fastjsonschema==2.21.2
filelock==3.24.0
-e git+https://github.com/microsoft/FLAML@6f573938bf46a906dfba1101034fd05813268c29#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.48.0
graphviz==0.21
greenlet==3.3.1
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
httpcore==1.0.9
httpx==0.28.1
huggingface_hub==1.4.1
identify==2.6.16
idna==3.11
importlib_metadata==8.7.1
iniconfig==2.3.0
ipykernel==7.2.0
ipython==8.38.0
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.4
jupyterlab==4.5.4
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.1
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
mdurl==0.1.2
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.1
multiprocess==0.70.18
narwhals==2.16.0
nbclient==0.10.4
nbconvert==7.17.0
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.4.2
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.3
notebook_shim==0.2.4
numpy==1.26.4
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==2.8.0
overrides==7.7.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.6
patsy==1.0.2
pillow==12.1.1
platformdirs==4.9.1
plotly==6.5.2
pluggy==1.6.0
pre_commit==4.5.1
prettytable==3.17.0
prometheus_client==0.24.1
prompt_toolkit==3.0.52
propcache==0.4.1
protobuf==6.33.5
psutil==7.2.2
pure_eval==0.2.3
pyarrow==23.0.0
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.2
pyperclip==1.11.0
pyreadline3==3.5.4
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
pytz==2025.2
pywinpty==3.0.3
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2026.1.15
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf-python==3.12.0
rich==14.3.2
rich-argparse==1.7.2
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-base==0.13.1
scikit-learn==1.7.2
scipy==1.15.3
Send2Trash==2.1.0
seqeval==1.2.2
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
soupsieve==2.8.3
SQLAlchemy==2.0.46
sqlparse==0.5.5
stack-data==0.6.3
starlette==0.52.1
statsmodels==0.14.6
stevedore==5.6.0
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
tomli==2.4.0
torch==2.10.0
torchmetrics==1.8.2
torchvision==0.25.0
tornado==6.5.4
tqdm==4.67.3
traitlets==5.14.3
transformers==5.1.0
typer==0.23.1
typer-slim==0.23.1
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.6.0
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==1.7.6
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,217 @@
absl-py==2.3.1
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.0
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
appnope==0.1.4
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.0.5
attrs==25.4.0
babel==2.17.0
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cloudpickle==3.1.2
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.3
convertdate==2.4.0
coverage==7.13.1
cycler==0.12.1
databricks-sdk==0.77.0
dataclasses==0.6
datasets==4.4.2
debugpy==1.8.19
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
executing==2.2.1
fastapi==0.128.0
fastjsonschema==2.21.2
filelock==3.20.3
-e git+https://github.com/microsoft/FLAML@3ab9ce3cda330a54210c591e89b7f8674948d607#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.47.0
graphviz==0.21
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
httpcore==1.0.9
httpx==0.28.1
huggingface-hub==0.36.0
identify==2.6.15
idna==3.11
importlib_metadata==8.7.1
iniconfig==2.3.0
ipykernel==7.1.0
ipython==9.9.0
ipython_pygments_lexers==1.1.1
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.3
jupyterlab==4.5.1
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.0
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.0
multiprocess==0.70.18
narwhals==2.15.0
nbclient==0.10.4
nbconvert==7.16.6
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.6.1
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.1
notebook_shim==0.2.4
numpy==1.26.4
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==3.6.1
overrides==7.7.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.5
patsy==1.0.2
pexpect==4.9.0
pillow==12.1.0
platformdirs==4.5.1
plotly==6.5.1
pluggy==1.6.0
pre_commit==4.5.1
prometheus_client==0.23.1
prompt_toolkit==3.0.52
propcache==0.4.1
protobuf==6.33.3
psutil==7.2.1
ptyprocess==0.7.0
pure_eval==0.2.3
pyarrow==22.0.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.23
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.1
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.5.0
pytorch-lightning==2.6.0
pytz==2025.2
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2025.11.3
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf_python==3.12.0
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-learn==1.8.0
scipy==1.16.3
Send2Trash==2.0.0
seqeval==1.2.2
six==1.17.0
smmap==5.0.2
soupsieve==2.8.1
SQLAlchemy==2.0.45
sqlparse==0.5.5
stack-data==0.6.3
starlette==0.50.0
statsmodels==0.14.6
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
torch==2.9.1
torchmetrics==1.8.2
torchvision==0.24.1
tornado==6.5.4
tqdm==4.67.1
traitlets==5.14.3
transformers==4.57.3
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.2.14
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==3.1.3
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,258 @@
absl-py==2.4.0
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.4
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.1.0
attrs==25.4.0
autopage==0.6.0
babel==2.18.0
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cliff==4.13.1
cloudpickle==3.1.2
cmaes==0.12.0
cmd2==3.2.0
cmdstanpy==1.3.0
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.3
convertdate==2.4.1
coverage==7.13.4
cryptography==46.0.5
cuda-bindings==12.9.4
cuda-pathfinder==1.3.4
cycler==0.12.1
databricks-sdk==0.88.0
dataclasses==0.6
datasets==4.5.0
debugpy==1.8.20
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
executing==2.2.1
fastapi==0.129.0
fastjsonschema==2.21.2
filelock==3.24.0
-e git+https://github.com/microsoft/FLAML@4ac0347e5a032ff01b3ad05ac51863cfbd1a1b62#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.48.0
graphviz==0.21
greenlet==3.3.1
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
holidays==0.90
httpcore==1.0.9
httpx==0.28.1
huggingface_hub==1.4.1
identify==2.6.16
idna==3.11
importlib_metadata==8.7.1
importlib_resources==6.5.2
iniconfig==2.3.0
ipykernel==7.2.0
ipython==9.10.0
ipython_pygments_lexers==1.1.1
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.4
jupyterlab==4.5.4
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.1
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
mdurl==0.1.2
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.1
multiprocess==0.70.18
narwhals==2.16.0
nbclient==0.10.4
nbconvert==7.17.0
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.6.1
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.3
notebook_shim==0.2.4
numpy==1.26.4
nvidia-cublas-cu12==12.8.4.1
nvidia-cuda-cupti-cu12==12.8.90
nvidia-cuda-nvrtc-cu12==12.8.93
nvidia-cuda-runtime-cu12==12.8.90
nvidia-cudnn-cu12==9.10.2.21
nvidia-cufft-cu12==11.3.3.83
nvidia-cufile-cu12==1.13.1.3
nvidia-curand-cu12==10.3.9.90
nvidia-cusolver-cu12==11.7.3.90
nvidia-cusparse-cu12==12.5.8.93
nvidia-cusparselt-cu12==0.7.1
nvidia-nccl-cu12==2.27.5
nvidia-nvjitlink-cu12==12.8.93
nvidia-nvshmem-cu12==3.4.5
nvidia-nvtx-cu12==12.8.90
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==2.8.0
overrides==7.7.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.6
patsy==1.0.2
pexpect==4.9.0
pillow==12.1.1
platformdirs==4.9.1
plotly==6.5.2
pluggy==1.6.0
pre_commit==4.5.1
prettytable==3.17.0
prometheus_client==0.24.1
prompt_toolkit==3.0.52
propcache==0.4.1
prophet==1.3.0
protobuf==6.33.5
psutil==7.2.2
ptyprocess==0.7.0
pure_eval==0.2.3
py4j==0.10.9.7
pyarrow==23.0.0
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.2
pyperclip==1.11.0
pyspark==3.5.1
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
pytz==2025.2
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2026.1.15
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf-python==3.12.0
rich==14.3.2
rich-argparse==1.7.2
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-base==0.13.1
scikit-learn==1.8.0
scipy==1.17.0
Send2Trash==2.1.0
seqeval==1.2.2
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
soupsieve==2.8.3
SQLAlchemy==2.0.46
sqlparse==0.5.5
stack-data==0.6.3
stanio==0.5.1
starlette==0.52.1
statsmodels==0.14.6
stevedore==5.6.0
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
torch==2.10.0
torchmetrics==1.8.2
torchvision==0.25.0
tornado==6.5.4
tqdm==4.67.3
traitlets==5.14.3
transformers==5.1.0
triton==3.6.0
typer==0.23.1
typer-slim==0.23.1
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.6.0
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==3.2.0
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,234 @@
absl-py==2.4.0
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.4
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.1.0
attrs==25.4.0
autopage==0.6.0
babel==2.18.0
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cliff==4.13.1
cloudpickle==3.1.2
cmaes==0.12.0
cmd2==3.2.0
colorama==0.4.6
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.3
convertdate==2.4.1
coverage==7.13.4
cryptography==46.0.5
cycler==0.12.1
databricks-sdk==0.88.0
dataclasses==0.6
datasets==4.5.0
debugpy==1.8.20
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
executing==2.2.1
fastapi==0.129.0
fastjsonschema==2.21.2
filelock==3.21.2
-e git+https://github.com/microsoft/FLAML@06111054a40bde57cca96dccd7476a3211b19893#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.48.0
graphviz==0.21
greenlet==3.3.1
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
httpcore==1.0.9
httpx==0.28.1
huggingface_hub==1.4.1
identify==2.6.16
idna==3.11
importlib_metadata==8.7.1
iniconfig==2.3.0
ipykernel==7.2.0
ipython==9.10.0
ipython_pygments_lexers==1.1.1
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.4
jupyterlab==4.5.4
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.1
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
mdurl==0.1.2
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.1
multiprocess==0.70.18
narwhals==2.16.0
nbclient==0.10.4
nbconvert==7.17.0
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.6.1
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.3
notebook_shim==0.2.4
numpy==1.26.4
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==2.8.0
overrides==7.7.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.6
patsy==1.0.2
pillow==12.1.1
platformdirs==4.7.0
plotly==6.5.2
pluggy==1.6.0
pre_commit==4.5.1
prettytable==3.17.0
prometheus_client==0.24.1
prompt_toolkit==3.0.52
propcache==0.4.1
protobuf==6.33.5
psutil==7.2.2
pure_eval==0.2.3
pyarrow==23.0.0
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.2
pyperclip==1.11.0
pyreadline3==3.5.4
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
pytz==2025.2
pywinpty==3.0.3
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2026.1.15
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf-python==3.12.0
rich==14.3.2
rich-argparse==1.7.2
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-base==0.13.1
scikit-learn==1.8.0
scipy==1.17.0
Send2Trash==2.1.0
seqeval==1.2.2
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
soupsieve==2.8.3
SQLAlchemy==2.0.46
sqlparse==0.5.5
stack-data==0.6.3
starlette==0.52.1
statsmodels==0.14.6
stevedore==5.6.0
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
torch==2.10.0
torchmetrics==1.8.2
torchvision==0.25.0
tornado==6.5.4
tqdm==4.67.3
traitlets==5.14.3
transformers==5.1.0
typer==0.23.0
typer-slim==0.23.0
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.6.0
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==3.2.0
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,259 @@
absl-py==2.4.0
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.4
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.1.0
attrs==25.4.0
autopage==0.6.0
babel==2.18.0
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cliff==4.13.1
cloudpickle==3.1.2
cmaes==0.12.0
cmd2==3.2.0
cmdstanpy==1.3.0
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.3
convertdate==2.4.1
coverage==7.13.4
cryptography==46.0.5
cuda-bindings==12.9.4
cuda-pathfinder==1.3.4
cycler==0.12.1
databricks-sdk==0.88.0
dataclasses==0.6
datasets==4.5.0
debugpy==1.8.20
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
executing==2.2.1
fastapi==0.129.0
fastjsonschema==2.21.2
filelock==3.24.0
-e git+https://github.com/microsoft/FLAML@7083042210ce43ef6dd6cd2d2b71896f23e645d5#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.48.0
graphviz==0.21
greenlet==3.3.1
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
holidays==0.90
httpcore==1.0.9
httpx==0.28.1
huggingface_hub==1.4.1
identify==2.6.16
idna==3.11
importlib_metadata==8.7.1
importlib_resources==6.5.2
iniconfig==2.3.0
ipykernel==7.2.0
ipython==9.10.0
ipython_pygments_lexers==1.1.1
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.4
jupyterlab==4.5.4
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.1
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
mdurl==0.1.2
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.1
multiprocess==0.70.18
narwhals==2.16.0
nbclient==0.10.4
nbconvert==7.17.0
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.6.1
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.3
notebook_shim==0.2.4
numpy==1.26.4
nvidia-cublas-cu12==12.8.4.1
nvidia-cuda-cupti-cu12==12.8.90
nvidia-cuda-nvrtc-cu12==12.8.93
nvidia-cuda-runtime-cu12==12.8.90
nvidia-cudnn-cu12==9.10.2.21
nvidia-cufft-cu12==11.3.3.83
nvidia-cufile-cu12==1.13.1.3
nvidia-curand-cu12==10.3.9.90
nvidia-cusolver-cu12==11.7.3.90
nvidia-cusparse-cu12==12.5.8.93
nvidia-cusparselt-cu12==0.7.1
nvidia-nccl-cu12==2.27.5
nvidia-nvjitlink-cu12==12.8.93
nvidia-nvshmem-cu12==3.4.5
nvidia-nvtx-cu12==12.8.90
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.6
patsy==1.0.2
pexpect==4.9.0
pillow==12.1.1
platformdirs==4.9.1
plotly==6.5.2
pluggy==1.6.0
pre_commit==4.5.1
prettytable==3.17.0
prometheus_client==0.24.1
prompt_toolkit==3.0.52
propcache==0.4.1
prophet==1.3.0
protobuf==6.33.5
psutil==7.2.2
ptyprocess==0.7.0
pure_eval==0.2.3
py4j==0.10.9.9
pyarrow==23.0.0
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.2
pyperclip==1.11.0
pyspark==4.0.1
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
pytz==2025.2
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2026.1.15
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf-python==3.12.0
rich==14.3.2
rich-argparse==1.7.2
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-base==0.13.1
scikit-learn==1.8.0
scipy==1.17.0
Send2Trash==2.1.0
seqeval==1.2.2
setuptools==81.0.0
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
soupsieve==2.8.3
SQLAlchemy==2.0.46
sqlparse==0.5.5
stack-data==0.6.3
stanio==0.5.1
starlette==0.52.1
statsmodels==0.14.6
stevedore==5.6.0
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
torch==2.10.0
torchmetrics==1.8.2
torchvision==0.25.0
tornado==6.5.4
tqdm==4.67.3
traitlets==5.14.3
transformers==5.1.0
triton==3.6.0
typer==0.23.1
typer-slim==0.23.1
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.6.0
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
wheel==0.46.3
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==3.2.0
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,238 @@
absl-py==2.4.0
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.4
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
argcomplete==3.6.3
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.1.0
attrs==25.4.0
autopage==0.6.0
babel==2.18.0
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cliff==4.13.1
cloudpickle==3.1.2
cmaes==0.12.0
cmd2==3.2.0
colorama==0.4.6
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.3
convertdate==2.4.1
coverage==7.13.4
cryptography==46.0.5
cycler==0.12.1
databricks-sdk==0.88.0
dataclasses==0.6
datasets==4.5.0
debugpy==1.8.20
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
executing==2.2.1
fastapi==0.129.0
fastjsonschema==2.21.2
filelock==3.24.0
-e git+https://github.com/microsoft/FLAML@579f795f333e6eb7e7e9c769a1f24721bc86700b#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.48.0
graphviz==0.21
greenlet==3.3.1
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
httpcore==1.0.9
httpx==0.28.1
huggingface_hub==1.4.1
identify==2.6.16
idna==3.11
importlib_metadata==8.7.1
iniconfig==2.3.0
ipykernel==7.2.0
ipython==9.10.0
ipython_pygments_lexers==1.1.1
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.4
jupyterlab==4.5.4
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.1
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
mdurl==0.1.2
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.1
multiprocess==0.70.18
narwhals==2.16.0
nbclient==0.10.4
nbconvert==7.17.0
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.6.1
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.3
notebook_shim==0.2.4
numpy==1.26.4
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.6
patsy==1.0.2
pillow==12.1.1
pipx==1.8.0
platformdirs==4.5.1
plotly==6.5.2
pluggy==1.6.0
pre_commit==4.5.1
prettytable==3.17.0
prometheus_client==0.24.1
prompt_toolkit==3.0.52
propcache==0.4.1
protobuf==6.33.5
psutil==7.2.2
pure_eval==0.2.3
pyarrow==23.0.0
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.2
pyperclip==1.11.0
pyreadline3==3.5.4
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
pytz==2025.2
pywinpty==3.0.3
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2026.1.15
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf-python==3.12.0
rich==14.3.2
rich-argparse==1.7.2
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-base==0.13.1
scikit-learn==1.8.0
scipy==1.17.0
Send2Trash==2.1.0
seqeval==1.2.2
setuptools==81.0.0
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
soupsieve==2.8.3
SQLAlchemy==2.0.46
sqlparse==0.5.5
stack-data==0.6.3
starlette==0.52.1
statsmodels==0.14.6
stevedore==5.6.0
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
torch==2.10.0
torchmetrics==1.8.2
torchvision==0.25.0
tornado==6.5.4
tqdm==4.67.3
traitlets==5.14.3
transformers==5.1.0
typer==0.23.1
typer-slim==0.23.1
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
userpath==1.9.2
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.6.0
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
wheel==0.46.3
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==3.2.0
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,259 @@
absl-py==2.4.0
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.4
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.1.0
attrs==25.4.0
autopage==0.6.0
babel==2.18.0
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cliff==4.13.1
cloudpickle==3.1.2
cmaes==0.12.0
cmd2==3.2.0
cmdstanpy==1.3.0
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.3
convertdate==2.4.1
coverage==7.13.4
cryptography==46.0.5
cuda-bindings==12.9.4
cuda-pathfinder==1.3.4
cycler==0.12.1
databricks-sdk==0.88.0
dataclasses==0.6
datasets==4.5.0
debugpy==1.8.20
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
executing==2.2.1
fastapi==0.129.0
fastjsonschema==2.21.2
filelock==3.24.0
-e git+https://github.com/microsoft/FLAML@1d301b413c65128d61006a4c19a95a911ca308ad#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.48.0
graphviz==0.21
greenlet==3.3.1
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
holidays==0.90
httpcore==1.0.9
httpx==0.28.1
huggingface_hub==1.4.1
identify==2.6.16
idna==3.11
importlib_metadata==8.7.1
importlib_resources==6.5.2
iniconfig==2.3.0
ipykernel==7.2.0
ipython==9.10.0
ipython_pygments_lexers==1.1.1
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.4
jupyterlab==4.5.4
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.1
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
mdurl==0.1.2
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.1
multiprocess==0.70.18
narwhals==2.16.0
nbclient==0.10.4
nbconvert==7.17.0
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.6.1
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.3
notebook_shim==0.2.4
numpy==2.4.2
nvidia-cublas-cu12==12.8.4.1
nvidia-cuda-cupti-cu12==12.8.90
nvidia-cuda-nvrtc-cu12==12.8.93
nvidia-cuda-runtime-cu12==12.8.90
nvidia-cudnn-cu12==9.10.2.21
nvidia-cufft-cu12==11.3.3.83
nvidia-cufile-cu12==1.13.1.3
nvidia-curand-cu12==10.3.9.90
nvidia-cusolver-cu12==11.7.3.90
nvidia-cusparse-cu12==12.5.8.93
nvidia-cusparselt-cu12==0.7.1
nvidia-nccl-cu12==2.27.5
nvidia-nvjitlink-cu12==12.8.93
nvidia-nvshmem-cu12==3.4.5
nvidia-nvtx-cu12==12.8.90
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.6
patsy==1.0.2
pexpect==4.9.0
pillow==12.1.1
platformdirs==4.9.1
plotly==6.5.2
pluggy==1.6.0
pre_commit==4.5.1
prettytable==3.17.0
prometheus_client==0.24.1
prompt_toolkit==3.0.52
propcache==0.4.1
prophet==1.3.0
protobuf==6.33.5
psutil==7.2.2
ptyprocess==0.7.0
pure_eval==0.2.3
py4j==0.10.9.9
pyarrow==23.0.0
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.2
pyperclip==1.11.0
pyspark==4.1.0
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
pytz==2025.2
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2026.1.15
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf-python==3.12.0
rich==14.3.2
rich-argparse==1.7.2
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-base==0.13.1
scikit-learn==1.8.0
scipy==1.17.0
Send2Trash==2.1.0
seqeval==1.2.2
setuptools==81.0.0
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
soupsieve==2.8.3
SQLAlchemy==2.0.46
sqlparse==0.5.5
stack-data==0.6.3
stanio==0.5.1
starlette==0.52.1
statsmodels==0.14.6
stevedore==5.6.0
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
torch==2.10.0
torchmetrics==1.8.2
torchvision==0.25.0
tornado==6.5.4
tqdm==4.67.3
traitlets==5.14.3
transformers==5.1.0
triton==3.6.0
typer==0.23.1
typer-slim==0.23.1
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.6.0
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
wheel==0.46.3
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==3.2.0
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,235 @@
absl-py==2.4.0
accelerate==1.12.0
aiohappyeyeballs==2.6.1
aiohttp==3.13.3
aiosignal==1.4.0
alembic==1.18.4
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
argon2-cffi==25.1.0
argon2-cffi-bindings==25.1.0
arrow==1.4.0
asttokens==3.0.1
async-lru==2.1.0
attrs==25.4.0
autopage==0.6.0
babel==2.18.0
beautifulsoup4==4.14.3
bleach==6.3.0
cachetools==5.5.2
catboost==1.2.8
certifi==2026.1.4
cffi==2.0.0
cfgv==3.5.0
charset-normalizer==3.4.4
click==8.3.1
cliff==4.13.1
cloudpickle==3.1.2
cmaes==0.12.0
cmd2==3.2.0
colorama==0.4.6
colorlog==6.10.1
comm==0.2.3
contourpy==1.3.3
convertdate==2.4.1
coverage==7.13.4
cryptography==46.0.5
cycler==0.12.1
databricks-sdk==0.88.0
dataclasses==0.6
datasets==4.5.0
debugpy==1.8.20
decorator==5.2.1
defusedxml==0.7.1
dill==0.4.0
distlib==0.4.0
evaluate==0.4.6
executing==2.2.1
fastapi==0.129.0
fastjsonschema==2.21.2
filelock==3.24.0
-e git+https://github.com/microsoft/FLAML@897eae6616b1c24037ce8d3dae6c0aa3263e85a9#egg=FLAML
fonttools==4.61.1
fqdn==1.5.1
frozenlist==1.8.0
fsspec==2025.10.0
gitdb==4.0.12
GitPython==3.1.46
google-auth==2.48.0
graphviz==0.21
greenlet==3.3.1
h11==0.16.0
hcrystalball==0.1.12
hf-xet==1.2.0
httpcore==1.0.9
httpx==0.28.1
huggingface_hub==1.4.1
identify==2.6.16
idna==3.11
importlib_metadata==8.7.1
iniconfig==2.3.0
ipykernel==7.2.0
ipython==9.10.0
ipython_pygments_lexers==1.1.1
ipywidgets==8.1.8
isoduration==20.11.0
jedi==0.19.2
Jinja2==3.1.6
joblib==1.3.2
joblibspark==0.6.0
json5==0.13.0
jsonpointer==3.0.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.12.0
jupyter-lsp==2.3.0
jupyter_client==8.8.0
jupyter_core==5.9.1
jupyter_server==2.17.0
jupyter_server_terminals==0.5.4
jupyterlab==4.5.4
jupyterlab_pygments==0.3.0
jupyterlab_server==2.28.0
jupyterlab_widgets==3.0.16
kiwisolver==1.4.9
lark==1.3.1
liac-arff==2.5.0
lightgbm==4.6.0
lightning==2.6.1
lightning-utilities==0.15.2
lunardate==0.2.2
Mako==1.3.10
markdown-it-py==4.0.0
MarkupSafe==3.0.3
matplotlib==3.10.8
matplotlib-inline==0.2.1
mdurl==0.1.2
minio==7.2.20
mistune==3.2.0
mlflow-skinny==2.22.1
mpmath==1.3.0
multidict==6.7.1
multiprocess==0.70.18
narwhals==2.16.0
nbclient==0.10.4
nbconvert==7.17.0
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.6.1
nltk==3.9.2
nodeenv==1.10.0
notebook==7.5.3
notebook_shim==0.2.4
numpy==2.4.2
openml==0.15.1
opentelemetry-api==1.39.1
opentelemetry-sdk==1.39.1
opentelemetry-semantic-conventions==0.60b1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
pandocfilters==1.5.1
parso==0.8.6
patsy==1.0.2
pillow==12.1.1
platformdirs==4.9.1
plotly==6.5.2
pluggy==1.6.0
pre_commit==4.5.1
prettytable==3.17.0
prometheus_client==0.24.1
prompt_toolkit==3.0.52
propcache==0.4.1
protobuf==6.33.5
psutil==7.2.2
pure_eval==0.2.3
pyarrow==23.0.0
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pycryptodome==3.23.0
pydantic==2.12.5
pydantic_core==2.41.5
Pygments==2.19.2
pyluach==2.3.0
PyMeeus==0.5.12
pyparsing==3.3.2
pyperclip==1.11.0
pyreadline3==3.5.4
pytest==9.0.2
pytest-rerunfailures==16.1
python-dateutil==2.9.0.post0
python-json-logger==4.0.0
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
pytz==2025.2
pywinpty==3.0.3
PyYAML==6.0.3
pyzmq==27.1.0
referencing==0.37.0
regex==2026.1.15
requests==2.32.5
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rfc3987-syntax==1.1.0
rgf-python==3.12.0
rich==14.3.2
rich-argparse==1.7.2
rouge_score==0.1.2
rpds-py==0.30.0
rsa==4.9.1
safetensors==0.7.0
scikit-base==0.13.1
scikit-learn==1.8.0
scipy==1.17.0
Send2Trash==2.1.0
seqeval==1.2.2
setuptools==81.0.0
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
soupsieve==2.8.3
SQLAlchemy==2.0.46
sqlparse==0.5.5
stack-data==0.6.3
starlette==0.52.1
statsmodels==0.14.6
stevedore==5.6.0
sympy==1.14.0
tensorboardX==2.6.4
terminado==0.18.1
thop==0.1.1.post2209072238
threadpoolctl==3.6.0
tinycss2==1.4.0
tokenizers==0.22.2
torch==2.10.0
torchmetrics==1.8.2
torchvision==0.25.0
tornado==6.5.4
tqdm==4.67.3
traitlets==5.14.3
transformers==5.1.0
typer==0.23.1
typer-slim==0.23.1
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
uri-template==1.3.0
urllib3==2.6.3
uvicorn==0.40.0
virtualenv==20.36.1
wcwidth==0.6.0
webcolors==25.10.0
webencodings==0.5.1
websocket-client==1.9.0
wheel==0.46.3
widgetsnbextension==4.0.15
workalendar==17.0.0
xgboost==3.2.0
xmltodict==1.0.2
xxhash==3.6.0
yarl==1.22.0
zipp==3.23.0

View File

@@ -0,0 +1,42 @@
catboost==1.2.8
coverage==7.13.4
dataclasses==0.6
datasets==4.5.0
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.2.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.17.0
nbformat==5.10.4
nltk==3.9.2
numpy==1.26.4
openml==0.15.1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
prophet==1.3.0
psutil==7.2.2
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
requests==2.32.5
rgf-python==3.12.0
rouge_score==0.1.2
scikit-learn==1.7.2
scipy==1.15.3
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.10.0
torchvision==0.25.0
transformers==5.1.0
xgboost==1.7.6
xgboost==1.7.6
Current commit hash: cc903f78074f93f2fcf644c046c9e3036ce7fb38

View File

@@ -0,0 +1,41 @@
catboost==1.2.8
coverage==7.13.4
dataclasses==0.6
datasets==4.5.0
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.2.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.17.0
nbformat==5.10.4
nltk==3.9.2
numpy==1.26.4
openml==0.15.1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
psutil==7.2.2
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
requests==2.32.5
rgf-python==3.12.0
rouge_score==0.1.2
scikit-learn==1.7.2
scipy==1.15.3
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.10.0
torchvision==0.25.0
transformers==5.1.0
xgboost==1.7.6
xgboost==1.7.6
Current commit hash: 6f573938bf46a906dfba1101034fd05813268c29

View File

@@ -0,0 +1,39 @@
catboost==1.2.8
coverage==7.13.1
dataclasses==0.6
datasets==4.4.2
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.1.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.16.6
nbformat==5.10.4
nltk==3.9.2
numpy==1.26.4
openml==0.15.1
optuna==3.6.1
packaging==24.2
pandas==2.3.3
psutil==7.2.1
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.5.0
pytorch-lightning==2.6.0
requests==2.32.5
rouge_score==0.1.2
scikit-learn==1.8.0
scipy==1.16.3
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.9.1
torchvision==0.24.1
transformers==4.57.3
xgboost==3.1.3
Current commit hash: 3ab9ce3cda330a54210c591e89b7f8674948d607

View File

@@ -0,0 +1,42 @@
catboost==1.2.8
coverage==7.13.4
dataclasses==0.6
datasets==4.5.0
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.2.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.17.0
nbformat==5.10.4
nltk==3.9.2
numpy==1.26.4
openml==0.15.1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
prophet==1.3.0
psutil==7.2.2
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
requests==2.32.5
rgf-python==3.12.0
rouge_score==0.1.2
scikit-learn==1.8.0
scipy==1.17.0
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.10.0
torchvision==0.25.0
transformers==5.1.0
xgboost==3.2.0
xgboost==3.2.0
Current commit hash: 4ac0347e5a032ff01b3ad05ac51863cfbd1a1b62

View File

@@ -0,0 +1,41 @@
catboost==1.2.8
coverage==7.13.4
dataclasses==0.6
datasets==4.5.0
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.2.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.17.0
nbformat==5.10.4
nltk==3.9.2
numpy==1.26.4
openml==0.15.1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
psutil==7.2.2
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
requests==2.32.5
rgf-python==3.12.0
rouge_score==0.1.2
scikit-learn==1.8.0
scipy==1.17.0
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.10.0
torchvision==0.25.0
transformers==5.1.0
xgboost==3.2.0
xgboost==3.2.0
Current commit hash: 06111054a40bde57cca96dccd7476a3211b19893

View File

@@ -0,0 +1,42 @@
catboost==1.2.8
coverage==7.13.4
dataclasses==0.6
datasets==4.5.0
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.2.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.17.0
nbformat==5.10.4
nltk==3.9.2
numpy==1.26.4
openml==0.15.1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
prophet==1.3.0
psutil==7.2.2
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
requests==2.32.5
rgf-python==3.12.0
rouge_score==0.1.2
scikit-learn==1.8.0
scipy==1.17.0
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.10.0
torchvision==0.25.0
transformers==5.1.0
xgboost==3.2.0
xgboost==3.2.0
Current commit hash: 7083042210ce43ef6dd6cd2d2b71896f23e645d5

View File

@@ -0,0 +1,41 @@
catboost==1.2.8
coverage==7.13.4
dataclasses==0.6
datasets==4.5.0
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.2.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.17.0
nbformat==5.10.4
nltk==3.9.2
numpy==1.26.4
openml==0.15.1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
psutil==7.2.2
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
requests==2.32.5
rgf-python==3.12.0
rouge_score==0.1.2
scikit-learn==1.8.0
scipy==1.17.0
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.10.0
torchvision==0.25.0
transformers==5.1.0
xgboost==3.2.0
xgboost==3.2.0
Current commit hash: 579f795f333e6eb7e7e9c769a1f24721bc86700b

View File

@@ -0,0 +1,42 @@
catboost==1.2.8
coverage==7.13.4
dataclasses==0.6
datasets==4.5.0
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.2.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.17.0
nbformat==5.10.4
nltk==3.9.2
numpy==2.4.2
openml==0.15.1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
prophet==1.3.0
psutil==7.2.2
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
requests==2.32.5
rgf-python==3.12.0
rouge_score==0.1.2
scikit-learn==1.8.0
scipy==1.17.0
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.10.0
torchvision==0.25.0
transformers==5.1.0
xgboost==3.2.0
xgboost==3.2.0
Current commit hash: 1d301b413c65128d61006a4c19a95a911ca308ad

View File

@@ -0,0 +1,41 @@
catboost==1.2.8
coverage==7.13.4
dataclasses==0.6
datasets==4.5.0
dill==0.4.0
evaluate==0.4.6
hcrystalball==0.1.12
ipykernel==7.2.0
joblib==1.3.2
joblibspark==0.6.0
jupyter==1.1.1
lightgbm==4.6.0
mlflow-skinny==2.22.1
nbconvert==7.17.0
nbformat==5.10.4
nltk==3.9.2
numpy==2.4.2
openml==0.15.1
optuna==2.8.0
packaging==24.2
pandas==2.3.3
psutil==7.2.2
pytest-rerunfailures==16.1
pytest==9.0.2
pytorch-forecasting==1.6.1
pytorch-lightning==2.6.1
requests==2.32.5
rgf-python==3.12.0
rouge_score==0.1.2
scikit-learn==1.8.0
scipy==1.17.0
seqeval==1.2.2
statsmodels==0.14.6
tensorboardX==2.6.4
thop==0.1.1-2209072238
torch==2.10.0
torchvision==0.25.0
transformers==5.1.0
xgboost==3.2.0
xgboost==3.2.0
Current commit hash: 897eae6616b1c24037ce8d3dae6c0aa3263e85a9

pytest.ini Normal file
View File

@@ -0,0 +1,3 @@
[pytest]
markers =
    spark: mark a test as requiring Spark
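Registering the marker lets Spark-dependent tests be selected or skipped as a group (for example, pytest -m "not spark" deselects them). A minimal sketch of applying it to a whole test module, mirroring the pytestmark line added to the Spark test suite later in this diff:

import pytest

# Mark every test in this module as requiring Spark, matching the marker registered above.
pytestmark = pytest.mark.spark

def test_needs_spark():
    assert True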

View File

@@ -51,60 +51,59 @@ setuptools.setup(
"joblib<=1.3.2",
],
"test": [
"jupyter",
"numpy>=1.17,<2.0.0; python_version<'3.13'",
"numpy>2.0.0; python_version>='3.13'",
"jupyter; python_version<'3.13'",
"lightgbm>=2.3.1",
"xgboost>=0.90,<2.0.0",
"xgboost>=0.90,<2.0.0; python_version<'3.11'",
"xgboost>=2.0.0; python_version>='3.11'",
"scipy>=1.4.1",
"pandas>=1.1.4,<2.0.0; python_version<'3.10'",
"pandas>=1.1.4; python_version>='3.10'",
"scikit-learn>=1.0.0",
"scikit-learn>=1.2.0",
"thop",
"pytest>=6.1.1",
"pytest-rerunfailures>=13.0",
"coverage>=5.3",
"pre-commit",
"torch",
"torchvision",
"catboost>=0.26,<1.2; python_version<'3.11'",
"catboost>=0.26; python_version>='3.11'",
"catboost>=0.26; python_version<'3.13'",
"rgf-python",
"optuna>=2.8.0,<=3.6.1",
"openml",
"openml; python_version<'3.13'",
"statsmodels>=0.12.2",
"psutil==5.8.0",
"psutil",
"dataclasses",
"transformers[torch]==4.26",
"transformers[torch]",
"datasets",
"nltk<=3.8.1", # 3.8.2 doesn't work with mlflow
"evaluate",
"nltk!=3.8.2", # 3.8.2 doesn't work with mlflow
"rouge_score",
"hcrystalball==0.1.10",
"hcrystalball",
"seqeval",
"pytorch-forecasting>=0.9.0,<=0.10.1; python_version<'3.11'",
# "pytorch-forecasting==0.10.1; python_version=='3.11'",
"mlflow==2.15.1",
"pytorch-forecasting; python_version<'3.13'",
"mlflow-skinny<=2.22.1", # Refer to https://mvnrepository.com/artifact/org.mlflow/mlflow-spark
"joblibspark>=0.5.0",
"joblib<=1.3.2",
"nbconvert",
"nbformat",
"ipykernel",
"pytorch-lightning<1.9.1", # test_forecast_panel
"tensorboardX==2.6", # test_forecast_panel
"requests<2.29.0", # https://github.com/docker/docker-py/issues/3113
"pytorch-lightning", # test_forecast_panel
"tensorboardX", # test_forecast_panel
"requests", # https://github.com/docker/docker-py/issues/3113
"packaging",
"pydantic==1.10.9",
"sympy",
"wolframalpha",
"dill", # a drop in replacement of pickle
],
"catboost": [
"catboost>=0.26,<1.2; python_version<'3.11'",
"catboost>=0.26,<=1.2.5; python_version>='3.11'",
"catboost>=0.26",
],
"blendsearch": [
"optuna>=2.8.0,<=3.6.1",
"packaging",
],
"ray": [
"ray[tune]~=1.13",
"ray[tune]>=1.13,<2.5.0",
],
"azureml": [
"azureml-mlflow",
@@ -131,33 +130,21 @@ setuptools.setup(
"seqeval",
],
"ts_forecast": [
"holidays<0.14", # to prevent installation error for prophet
"prophet>=1.0.1",
"holidays",
"prophet>=1.1.5",
"statsmodels>=0.12.2",
"hcrystalball==0.1.10",
"hcrystalball>=0.1.10",
],
"forecast": [
"holidays<0.14", # to prevent installation error for prophet
"prophet>=1.0.1",
"holidays",
"prophet>=1.1.5",
"statsmodels>=0.12.2",
"hcrystalball==0.1.10",
"pytorch-forecasting>=0.9.0; python_version<'3.11'",
# "pytorch-forecasting==0.10.1; python_version=='3.11'",
"pytorch-lightning==1.9.0",
"tensorboardX==2.6",
"hcrystalball>=0.1.10",
"pytorch-forecasting>=0.10.4; python_version<'3.13'",
"pytorch-lightning>=1.9.0",
"tensorboardX>=2.6",
],
"benchmark": ["catboost>=0.26", "psutil==5.8.0", "xgboost==1.3.3", "pandas==1.1.4"],
"openai": ["openai==0.27.8", "diskcache"],
"autogen": ["openai==0.27.8", "diskcache", "termcolor"],
"mathchat": ["openai==0.27.8", "diskcache", "termcolor", "sympy", "pydantic==1.10.9", "wolframalpha"],
"retrievechat": [
"openai==0.27.8",
"diskcache",
"termcolor",
"chromadb",
"tiktoken",
"sentence_transformers",
],
"synapse": [
"joblibspark>=0.5.0",
"optuna>=2.8.0,<=3.6.1",
@@ -170,10 +157,9 @@ setuptools.setup(
"Operating System :: OS Independent",
# Specify the Python versions you support here.
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
],
python_requires=">=3.8",
python_requires=">=3.10",
)

View File

@@ -17,6 +17,8 @@ from flaml import AutoML
from flaml.automl.ml import sklearn_metric_loss_score
from flaml.tune.spark.utils import check_spark
pytestmark = pytest.mark.spark
leaderboard = defaultdict(dict)
warnings.simplefilter(action="ignore")

View File

@@ -477,7 +477,10 @@ def test_forecast_classification(budget=5):
def get_stalliion_data():
    from pytorch_forecasting.data.examples import get_stallion_data

    data = get_stallion_data()
    # data = get_stallion_data()
    data = pd.read_parquet(
        "https://raw.githubusercontent.com/sktime/pytorch-forecasting/refs/heads/main/examples/data/stallion.parquet"
    )
    # add time index - For datasets with no missing values, FLAML will automate this process
    data["time_idx"] = data["date"].dt.year * 12 + data["date"].dt.month
    data["time_idx"] -= data["time_idx"].min()

View File

@@ -0,0 +1,51 @@
import mlflow
import numpy as np
import pandas as pd

from flaml import AutoML


def test_max_iter_1():
    date_rng = pd.date_range(start="2024-01-01", periods=100, freq="H")
    X = pd.DataFrame({"ds": date_rng})
    y_train_24h = np.random.rand(len(X)) * 100

    # AutoML
    settings = {
        "max_iter": 1,
        "estimator_list": ["xgboost", "lgbm"],
        "starting_points": {"xgboost": {}, "lgbm": {}},
        "task": "ts_forecast",
        "log_file_name": "test_max_iter_1.log",
        "seed": 41,
        "mlflow_exp_name": "TestExp-max_iter-1",
        "use_spark": False,
        "n_concurrent_trials": 1,
        "verbose": 1,
        "featurization": "off",
        "metric": "rmse",
        "mlflow_logging": True,
    }
    automl = AutoML(**settings)
    with mlflow.start_run(run_name="AutoMLModel-XGBoost-and-LGBM-max_iter_1"):
        automl.fit(
            X_train=X,
            y_train=y_train_24h,
            period=24,
            X_val=X,
            y_val=y_train_24h,
            split_ratio=0,
            force_cancel=False,
        )
    assert automl.model is not None, "AutoML failed to return a model"
    assert automl.best_run_id is not None, "Best run ID should not be None with mlflow logging"
    print("Best model:", automl.model)
    print("Best run ID:", automl.best_run_id)


if __name__ == "__main__":
    test_max_iter_1()

View File

@@ -10,6 +10,18 @@ from flaml import AutoML
class TestMLFlowLoggingParam:
    def test_update_and_install_requirements(self):
        import mlflow
        from sklearn import tree

        from flaml.fabric.mlflow import update_and_install_requirements

        with mlflow.start_run(run_name="test") as run:
            sk_model = tree.DecisionTreeClassifier()
            mlflow.sklearn.log_model(sk_model, "model", registered_model_name="test")
            update_and_install_requirements(run_id=run.info.run_id)

    def test_should_start_new_run_by_default(self, automl_settings):
        with mlflow.start_run() as parent_run:
            automl = AutoML()

View File

@@ -143,4 +143,5 @@ def test_prep():
if __name__ == "__main__":
    test_lrl2()
    test_prep()

View File

@@ -1,8 +1,23 @@
import sys
import pytest
from minio.error import ServerError
from openml.exceptions import OpenMLServerException
try:
    from minio.error import ServerError
except ImportError:
    class ServerError(Exception):
        pass

try:
    from openml.exceptions import OpenMLServerException
except ImportError:
    class OpenMLServerException(Exception):
        pass
from requests.exceptions import ChunkedEncodingError, SSLError

View File

@@ -38,7 +38,7 @@ class TestLogging(unittest.TestCase):
"keep_search_state": True,
"learner_selector": "roundrobin",
}
X_train, y_train = fetch_california_housing(return_X_y=True)
X_train, y_train = fetch_california_housing(return_X_y=True, data_home="test")
n = len(y_train) >> 1
print(automl.model, automl.classes_, automl.predict(X_train))
automl.fit(

View File

@@ -47,7 +47,7 @@ class TestRegression(unittest.TestCase):
"n_jobs": 1,
"model_history": True,
}
X_train, y_train = fetch_california_housing(return_X_y=True)
X_train, y_train = fetch_california_housing(return_X_y=True, data_home="test")
n = int(len(y_train) * 9 // 10)
automl.fit(X_train=X_train[:n], y_train=y_train[:n], X_val=X_train[n:], y_val=y_train[n:], **automl_settings)
assert automl._state.eval_method == "holdout"
@@ -141,7 +141,7 @@ class TestRegression(unittest.TestCase):
"n_concurrent_trials": 10,
"hpo_method": hpo_method,
}
X_train, y_train = fetch_california_housing(return_X_y=True)
X_train, y_train = fetch_california_housing(return_X_y=True, data_home="test")
try:
automl_experiment.fit(X_train=X_train, y_train=y_train, **automl_settings)
print(automl_experiment.predict(X_train))
@@ -268,7 +268,7 @@ def test_reproducibility_of_regression_models(estimator: str):
"skip_transform": True,
"retrain_full": True,
}
X, y = fetch_california_housing(return_X_y=True, as_frame=True)
X, y = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
automl.fit(X_train=X, y_train=y, **automl_settings)
best_model = automl.model
assert best_model is not None
@@ -314,7 +314,7 @@ def test_reproducibility_of_catboost_regression_model():
"skip_transform": True,
"retrain_full": True,
}
X, y = fetch_california_housing(return_X_y=True, as_frame=True)
X, y = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
automl.fit(X_train=X, y_train=y, **automl_settings)
best_model = automl.model
assert best_model is not None
@@ -360,7 +360,7 @@ def test_reproducibility_of_lgbm_regression_model():
"skip_transform": True,
"retrain_full": True,
}
X, y = fetch_california_housing(return_X_y=True, as_frame=True)
X, y = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
automl.fit(X_train=X, y_train=y, **automl_settings)
best_model = automl.model
assert best_model is not None
@@ -424,7 +424,7 @@ def test_reproducibility_of_underlying_regression_models(estimator: str):
"skip_transform": True,
"retrain_full": False,
}
X, y = fetch_california_housing(return_X_y=True, as_frame=True)
X, y = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
automl.fit(X_train=X, y_train=y, **automl_settings)
best_model = automl.model
assert best_model is not None

View File

@@ -142,7 +142,7 @@ class TestScore:
def test_regression(self):
automl_experiment = AutoML()
X_train, y_train = fetch_california_housing(return_X_y=True)
X_train, y_train = fetch_california_housing(return_X_y=True, data_home="test")
n = int(len(y_train) * 9 // 10)
for each_estimator in [

View File

@@ -1,4 +1,6 @@
from sklearn.datasets import fetch_openml
import numpy as np
import pandas as pd
from sklearn.datasets import fetch_openml, load_iris
from sklearn.metrics import accuracy_score
from sklearn.model_selection import GroupKFold, KFold, train_test_split
@@ -48,7 +50,7 @@ def test_time():
_test(split_type="time")
def test_groups():
def test_groups_for_classification_task():
from sklearn.externals._arff import ArffException
try:
@@ -58,8 +60,6 @@ def test_groups():
X, y = load_wine(return_X_y=True)
import numpy as np
automl = AutoML()
automl_settings = {
"time_budget": 2,
@@ -68,7 +68,7 @@ def test_groups():
"model_history": True,
"eval_method": "cv",
"groups": np.random.randint(low=0, high=10, size=len(y)),
"estimator_list": ["lgbm", "rf", "xgboost", "kneighbor"],
"estimator_list": ["catboost", "lgbm", "rf", "xgboost", "kneighbor"],
"learner_selector": "roundrobin",
}
automl.fit(X, y, **automl_settings)
@@ -88,6 +88,72 @@ def test_groups():
automl.fit(X, y, **automl_settings)
def test_groups_for_regression_task():
"""Append nonsensical groups to iris dataset and use it to test that GroupKFold works for regression tasks"""
iris_dict_data = load_iris(as_frame=True)  # sklearn Bunch with pandas DataFrames
iris_data = iris_dict_data["frame"] # pandas dataframe data + target
rng = np.random.default_rng(42)
iris_data["cluster"] = rng.integers(
low=0, high=5, size=iris_data.shape[0]
) # np.random.randint(0, 5, iris_data.shape[0])
automl = AutoML()
X = iris_data[["sepal length (cm)", "sepal width (cm)", "petal length (cm)"]].to_numpy()
y = iris_data["petal width (cm)"]
X_train, X_test, y_train, y_test, groups_train, groups_test = train_test_split(
X, y, iris_data["cluster"], random_state=42
)
automl_settings = {
"max_iter": 5,
"time_budget": -1,
"metric": "r2",
"task": "regression",
"estimator_list": ["lgbm", "rf", "xgboost", "kneighbor"],
"eval_method": "cv",
"split_type": "uniform",
"groups": groups_train,
}
automl.fit(X_train, y_train, **automl_settings)
def test_groups_with_sample_weights():
"""Verifies that sample weights can be used with group splits i.e. that https://github.com/microsoft/FLAML/issues/1396 remains fixed"""
iris_dict_data = load_iris(as_frame=True) # numpy arrays
iris_data = iris_dict_data["frame"] # pandas dataframe data + target
iris_data["cluster"] = np.random.randint(0, 5, iris_data.shape[0])
automl = AutoML()
X = iris_data[["sepal length (cm)", "sepal width (cm)", "petal length (cm)"]].to_numpy()
y = iris_data["petal width (cm)"]
sample_weight = pd.Series(np.random.rand(X.shape[0]))
(
X_train,
X_test,
y_train,
y_test,
groups_train,
groups_test,
sample_weight_train,
sample_weight_test,
) = train_test_split(X, y, iris_data["cluster"], sample_weight, random_state=42)
automl_settings = {
"max_iter": 5,
"time_budget": -1,
"metric": "r2",
"task": "regression",
"log_file_name": "error.log",
"log_type": "all",
"estimator_list": ["lgbm"],
"eval_method": "cv",
"split_type": "group",
"groups": groups_train,
"sample_weight": sample_weight_train,
}
automl.fit(X_train, y_train, **automl_settings)
assert automl.model is not None
def test_stratified_groupkfold():
from minio.error import ServerError
from sklearn.model_selection import StratifiedGroupKFold
@@ -108,6 +174,7 @@ def test_stratified_groupkfold():
"split_type": splitter,
"groups": X_train["Airline"],
"estimator_list": [
"catboost",
"lgbm",
"rf",
"xgboost",
@@ -203,4 +270,4 @@ def test_object():
if __name__ == "__main__":
test_groups()
test_groups_for_classification_task()

View File

@@ -30,7 +30,7 @@ class TestTrainingLog(unittest.TestCase):
"keep_search_state": True,
"estimator_list": estimator_list,
}
X_train, y_train = fetch_california_housing(return_X_y=True)
X_train, y_train = fetch_california_housing(return_X_y=True, data_home="test")
automl.fit(X_train=X_train, y_train=y_train, **automl_settings)
# Check if the training log file is populated.
self.assertTrue(os.path.exists(filename))

View File

@@ -108,7 +108,14 @@ class TestWarmStart(unittest.TestCase):
def test_FLAML_sample_size_in_starting_points(self):
from minio.error import ServerError
from openml.exceptions import OpenMLServerException
try:
from openml.exceptions import OpenMLServerException
except ImportError:
class OpenMLServerException(Exception):
pass
from requests.exceptions import ChunkedEncodingError, SSLError
from flaml import AutoML

BIN
test/cal_housing_py3.pkz Normal file

Binary file not shown.

60
test/check_dependency.py Normal file
View File

@@ -0,0 +1,60 @@
import subprocess
from importlib.metadata import distributions
installed_libs = sorted(f"{dist.metadata['Name']}=={dist.version}" for dist in distributions())
first_tier_dependencies = [
"numpy",
"jupyter",
"lightgbm",
"xgboost",
"scipy",
"pandas",
"scikit-learn",
"thop",
"pytest",
"pytest-rerunfailures",
"coverage",
"pre-commit",
"torch",
"torchvision",
"catboost",
"rgf-python",
"optuna",
"openml",
"statsmodels",
"psutil",
"dataclasses",
"transformers[torch]",
"transformers",
"datasets",
"evaluate",
"nltk",
"rouge_score",
"hcrystalball",
"seqeval",
"pytorch-forecasting",
"mlflow-skinny",
"joblibspark",
"joblib",
"nbconvert",
"nbformat",
"ipykernel",
"pytorch-lightning",
"tensorboardX",
"requests",
"packaging",
"dill",
"ray",
"prophet",
]
for lib in installed_libs:
lib_name = lib.split("==")[0]
if lib_name in first_tier_dependencies:
print(lib)
# print current commit hash
commit_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode("utf-8").strip()
print(f"Current commit hash: {commit_hash}")

View File

@@ -2,11 +2,24 @@ from typing import Any, Dict, List, Union
import numpy as np
import pandas as pd
from catboost import CatBoostClassifier, CatBoostRegressor, Pool
import pytest
from sklearn.metrics import f1_score, r2_score
try:
from catboost import CatBoostClassifier, CatBoostRegressor, Pool
except ImportError: # pragma: no cover
CatBoostClassifier = None
CatBoostRegressor = None
Pool = None
def evaluate_cv_folds_with_underlying_model(X_train_all, y_train_all, kf, model: Any, task: str) -> pd.DataFrame:
def _is_catboost_model_type(model_type: type) -> bool:
if CatBoostClassifier is not None and CatBoostRegressor is not None:
return model_type is CatBoostClassifier or model_type is CatBoostRegressor
return getattr(model_type, "__module__", "").startswith("catboost")
def evaluate_cv_folds_with_underlying_model(X_train_all, y_train_all, kf, model: Any, task: str) -> List[float]:
"""Mimic the FLAML CV process to calculate the metrics across each fold.
:param X_train_all: X training data
@@ -17,7 +30,7 @@ def evaluate_cv_folds_with_underlying_model(X_train_all, y_train_all, kf, model:
:return: An array containing the metrics
"""
rng = np.random.RandomState(2020)
all_fold_metrics: List[Dict[str, Union[int, float]]] = []
all_fold_metrics: List[float] = []
for train_index, val_index in kf.split(X_train_all, y_train_all):
X_train_split, y_train_split = X_train_all, y_train_all
train_index = rng.permutation(train_index)
@@ -25,9 +38,11 @@ def evaluate_cv_folds_with_underlying_model(X_train_all, y_train_all, kf, model:
X_val = X_train_split.iloc[val_index]
y_train, y_val = y_train_split[train_index], y_train_split[val_index]
model_type = type(model)
if model_type is not CatBoostClassifier and model_type is not CatBoostRegressor:
if not _is_catboost_model_type(model_type):
model.fit(X_train, y_train)
else:
if Pool is None:
pytest.skip("catboost is not installed")
use_best_model = True
n = max(int(len(y_train) * 0.9), len(y_train) - 1000) if use_best_model else len(y_train)
X_tr, y_tr = (X_train)[:n], y_train[:n]
@@ -38,5 +53,5 @@ def evaluate_cv_folds_with_underlying_model(X_train_all, y_train_all, kf, model:
reproduced_metric = 1 - f1_score(y_val, y_pred_classes)
else:
reproduced_metric = 1 - r2_score(y_val, y_pred_classes)
all_fold_metrics.append(reproduced_metric)
all_fold_metrics.append(float(reproduced_metric))
return all_fold_metrics
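
For orientation, here is a minimal usage sketch of the `evaluate_cv_folds_with_underlying_model` helper shown above. The import path, the `KFold` splitter, and the `LGBMRegressor` choice are assumptions for illustration only and are not part of this diff:

```python
# Illustrative sketch only; the import path and the estimator are assumed.
from lightgbm import LGBMRegressor
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import KFold

# assumed location of the helper defined in the diff above
from utils import evaluate_cv_folds_with_underlying_model

X, y = fetch_california_housing(return_X_y=True, as_frame=True)
kf = KFold(n_splits=3, shuffle=True, random_state=2020)
model = LGBMRegressor(n_estimators=50)

# One (1 - r2) value per fold, mirroring FLAML's internal CV loop for regression.
fold_metrics = evaluate_cv_folds_with_underlying_model(X, y.to_numpy(), kf, model, task="regression")
print(fold_metrics)
```

A sketch like this could be used to compare the recomputed per-fold metrics against FLAML's own CV results.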

View File

@@ -60,7 +60,7 @@ def test_housing(as_frame=True):
"starting_points": "data",
"max_iter": 0,
}
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=as_frame)
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=as_frame, data_home="test")
automl.fit(X_train, y_train, **automl_settings)
@@ -115,7 +115,7 @@ def test_suggest_classification():
def test_suggest_regression():
location = "test/default"
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True)
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
suggested = suggest_hyperparams("regression", X_train, y_train, "lgbm", location=location)
print(suggested)
suggested = preprocess_and_suggest_hyperparams("regression", X_train, y_train, "xgboost", location=location)
@@ -137,7 +137,7 @@ def test_rf():
print(rf)
location = "test/default"
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True)
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
rf = RandomForestRegressor(default_location=location)
rf.fit(X_train[:100], y_train[:100])
rf.predict(X_train)
@@ -155,7 +155,7 @@ def test_extratrees():
print(classifier)
location = "test/default"
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True)
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
regressor = ExtraTreesRegressor(default_location=location)
regressor.fit(X_train[:100], y_train[:100])
regressor.predict(X_train)
@@ -175,7 +175,7 @@ def test_lgbm():
print(classifier.classes_)
location = "test/default"
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True)
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
regressor = LGBMRegressor(default_location=location)
regressor.fit(X_train, y_train)
regressor.predict(X_train)
@@ -194,7 +194,7 @@ def test_xgboost():
print(classifier.classes_)
location = "test/default"
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True)
X_train, y_train = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
regressor = XGBRegressor(default_location=location)
regressor.fit(X_train[:100], y_train[:100])
regressor.predict(X_train)

View File

@@ -24,6 +24,8 @@ model_path_list = [
if sys.platform.startswith("darwin") and sys.version_info[0] == 3 and sys.version_info[1] == 11:
pytest.skip("skipping Python 3.11 on MacOS", allow_module_level=True)
pytestmark = pytest.mark.spark # set to spark as parallel testing raised RuntimeError
def test_switch_1_1():
data_idx, model_path_idx = 0, 0

View File

@@ -5,6 +5,8 @@ import sys
import pytest
from utils import get_automl_settings, get_toy_data_seqclassification
pytestmark = pytest.mark.spark # set to spark as parallel testing raised MlflowException of changing parameter
@pytest.mark.skipif(sys.platform in ["darwin", "win32"], reason="do not run on mac os or windows")
def test_cv():

View File

@@ -10,6 +10,10 @@ from flaml.default import portfolio
if sys.platform.startswith("darwin") and sys.version_info[0] == 3 and sys.version_info[1] == 11:
pytest.skip("skipping Python 3.11 on MacOS", allow_module_level=True)
pytestmark = (
pytest.mark.spark
) # set to spark as parallel testing raised ValueError: Feature NonExisting not implemented.
def pop_args(fit_kwargs):
fit_kwargs.pop("max_iter", None)
@@ -26,21 +30,33 @@ def test_build_portfolio(path="./test/nlp/default", strategy="greedy"):
@pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows")
def test_starting_point_not_in_search_space():
from flaml import AutoML
"""Regression test for invalid starting points and custom_hp.
This test must not require network access to Hugging Face.
"""
"""
test starting_points located outside of the search space, and custom_hp is not set
"""
from flaml.automl.state import SearchState
from flaml.automl.task.factory import task_factory
this_estimator_name = "transformer"
X_train, y_train, X_val, y_val, _ = get_toy_data_seqclassification()
X_train, y_train, _, _, _ = get_toy_data_seqclassification()
task = task_factory("seq-classification", X_train, y_train)
estimator_class = task.estimator_class_from_str(this_estimator_name)
estimator_class.init()
automl = AutoML()
automl_settings = get_automl_settings(estimator_name=this_estimator_name)
automl_settings["starting_points"] = {this_estimator_name: [{"learning_rate": 2e-3}]}
automl.fit(X_train, y_train, **automl_settings)
assert automl._search_states[this_estimator_name].init_config[0]["learning_rate"] != 2e-3
# SearchState is where invalid starting points are filtered out when max_iter > 1.
search_state = SearchState(
learner_class=estimator_class,
data=X_train,
task=task,
starting_point={"learning_rate": 2e-3},
max_iter=3,
budget=10,
)
assert search_state.init_config and search_state.init_config[0].get("learning_rate") != 2e-3
"""
test starting_points located outside of the search space, and custom_hp is set
@@ -48,39 +64,60 @@ def test_starting_point_not_in_search_space():
from flaml import tune
X_train, y_train, X_val, y_val, _ = get_toy_data_seqclassification()
X_train, y_train, _, _, _ = get_toy_data_seqclassification()
this_estimator_name = "transformer_ms"
automl = AutoML()
automl_settings = get_automl_settings(estimator_name=this_estimator_name)
task = task_factory("seq-classification", X_train, y_train)
estimator_class = task.estimator_class_from_str(this_estimator_name)
estimator_class.init()
automl_settings["custom_hp"] = {
this_estimator_name: {
"model_path": {
"domain": "albert-base-v2",
},
"learning_rate": {
"domain": tune.choice([1e-4, 1e-5]),
},
"per_device_train_batch_size": {
"domain": 2,
},
}
custom_hp = {
"model_path": {
"domain": "albert-base-v2",
},
"learning_rate": {
"domain": tune.choice([1e-4, 1e-5]),
},
"per_device_train_batch_size": {
"domain": 2,
},
}
automl_settings["starting_points"] = "data:test/nlp/default/"
automl.fit(X_train, y_train, **automl_settings)
assert len(automl._search_states[this_estimator_name].init_config[0]) == len(
automl._search_states[this_estimator_name]._search_space_domain
) - len(automl_settings["custom_hp"][this_estimator_name]), (
# Simulate a suggested starting point (e.g. from portfolio) which becomes invalid
# after custom_hp constrains the space.
invalid_starting_points = [
{
"learning_rate": 1e-5,
"num_train_epochs": 1.0,
"per_device_train_batch_size": 8,
"seed": 43,
"global_max_steps": 100,
"model_path": "google/electra-base-discriminator",
}
]
search_state = SearchState(
learner_class=estimator_class,
data=X_train,
task=task,
starting_point=invalid_starting_points,
custom_hp=custom_hp,
max_iter=3,
budget=10,
)
assert search_state.init_config, "Expected a non-empty init_config list"
init_config0 = search_state.init_config[0]
assert init_config0 is not None
assert len(init_config0) == len(search_state._search_space_domain) - len(custom_hp), (
"The search space is updated with the custom_hp on {} hyperparameters of "
"the specified estimator without an initial value. Thus a valid init config "
"should only contain the cardinality of the search space minus {}".format(
len(automl_settings["custom_hp"][this_estimator_name]),
len(automl_settings["custom_hp"][this_estimator_name]),
len(custom_hp),
len(custom_hp),
)
)
assert automl._search_states[this_estimator_name].search_space["model_path"] == "albert-base-v2"
assert search_state.search_space["model_path"] == "albert-base-v2"
if os.path.exists("test/data/output/"):
try:
@@ -102,7 +139,13 @@ def test_points_to_evaluate():
automl_settings["custom_hp"] = {"transformer_ms": {"model_path": {"domain": "google/electra-small-discriminator"}}}
automl.fit(X_train, y_train, **automl_settings)
try:
automl.fit(X_train, y_train, **automl_settings)
except OSError as e:
message = str(e)
if "Too Many Requests" in message or "rate limit" in message.lower():
pytest.skip(f"Skipping HF model load/training: {message}")
raise
if os.path.exists("test/data/output/"):
try:
@@ -137,7 +180,14 @@ def test_zero_shot_nomodel():
fit_kwargs = automl_settings.pop("fit_kwargs_by_estimator", {}).get(estimator_name)
fit_kwargs.update(automl_settings)
pop_args(fit_kwargs)
model.fit(X_train, y_train, **fit_kwargs)
try:
model.fit(X_train, y_train, **fit_kwargs)
except OSError as e:
message = str(e)
if "Too Many Requests" in message or "rate limit" in message.lower():
pytest.skip(f"Skipping HF model load/training: {message}")
raise
if os.path.exists("test/data/output/"):
try:

View File

@@ -7,7 +7,7 @@ from sklearn.model_selection import train_test_split
from flaml import tune
from flaml.automl.model import LGBMEstimator
data = fetch_california_housing(return_X_y=False, as_frame=True)
data = fetch_california_housing(return_X_y=False, as_frame=True, data_home="test")
X, y = data.data, data.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=42)
X_train_ref = ray.put(X_train)

View File

@@ -11,7 +11,7 @@ automl_settings = {
"task": "regression",
"log_file_name": "test/california.log",
}
X_train, y_train = fetch_california_housing(return_X_y=True)
X_train, y_train = fetch_california_housing(return_X_y=True, data_home="test")
# Train with labeled input data
automl.fit(X_train=X_train, y_train=y_train, **automl_settings)
print(automl.model)

View File

@@ -3,11 +3,13 @@ import sys
import warnings
import mlflow
import numpy as np
import pytest
import sklearn.datasets as skds
from packaging.version import Version
from flaml import AutoML
from flaml.automl.data import auto_convert_dtypes_pandas, auto_convert_dtypes_spark, get_random_dataframe
from flaml.tune.spark.utils import check_spark
warnings.simplefilter(action="ignore")
@@ -58,7 +60,7 @@ if sys.version_info >= (3, 11):
else:
skip_py311 = False
pytestmark = pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests.")
pytestmark = [pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests."), pytest.mark.spark]
def _test_spark_synapseml_lightgbm(spark=None, task="classification"):
@@ -296,11 +298,88 @@ def _test_spark_large_df():
print("time cost in minutes: ", (end_time - start_time) / 60)
def test_get_random_dataframe():
# Test with an explicit row count, none ratio and seed
df = get_random_dataframe(n_rows=50, ratio_none=0.2, seed=123)
assert df.shape == (50, 14)  # 50 rows as requested; 14 columns by default
# Test column types
assert "timestamp" in df.columns and np.issubdtype(df["timestamp"].dtype, np.datetime64)
assert "id" in df.columns and np.issubdtype(df["id"].dtype, np.integer)
assert "score" in df.columns and np.issubdtype(df["score"].dtype, np.floating)
assert "category" in df.columns and df["category"].dtype.name == "category"
def test_auto_convert_dtypes_pandas():
# Create a test DataFrame with various types
import pandas as pd
test_df = pd.DataFrame(
{
"int_col": ["1", "2", "3", "4", "5", "6", "6"],
"float_col": ["1.1", "2.2", "3.3", "NULL", "5.5", "6.6", "6.6"],
"date_col": ["2021-01-01", "2021-02-01", "NA", "2021-04-01", "2021-05-01", "2021-06-01", "2021-06-01"],
"cat_col": ["A", "B", "A", "A", "B", "A", "B"],
"string_col": ["text1", "text2", "text3", "text4", "text5", "text6", "text7"],
}
)
# Convert dtypes
converted_df, schema = auto_convert_dtypes_pandas(test_df)
# Check conversions
assert schema["int_col"] == "int"
assert schema["float_col"] == "double"
assert schema["date_col"] == "timestamp"
assert schema["cat_col"] == "category"
assert schema["string_col"] == "string"
def test_auto_convert_dtypes_spark():
"""Test auto_convert_dtypes_spark function with various data types."""
import pandas as pd
# Create a test DataFrame with various types
test_pdf = pd.DataFrame(
{
"int_col": ["1", "2", "3", "4", "NA"],
"float_col": ["1.1", "2.2", "3.3", "NULL", "5.5"],
"date_col": ["2021-01-01", "2021-02-01", "NA", "2021-04-01", "2021-05-01"],
"cat_col": ["A", "B", "A", "C", "B"],
"string_col": ["text1", "text2", "text3", "text4", "text5"],
}
)
# Convert pandas DataFrame to Spark DataFrame
test_df = spark.createDataFrame(test_pdf)
# Convert dtypes
converted_df, schema = auto_convert_dtypes_spark(test_df)
# Check conversions
assert schema["int_col"] == "int"
assert schema["float_col"] == "double"
assert schema["date_col"] == "timestamp"
assert schema["cat_col"] == "string" # Conceptual category in schema
assert schema["string_col"] == "string"
# Verify the actual data types from the Spark DataFrame
spark_dtypes = dict(converted_df.dtypes)
assert spark_dtypes["int_col"] == "int"
assert spark_dtypes["float_col"] == "double"
assert spark_dtypes["date_col"] == "timestamp"
assert spark_dtypes["cat_col"] == "string" # In Spark, categories are still strings
assert spark_dtypes["string_col"] == "string"
if __name__ == "__main__":
test_spark_synapseml_classification()
test_spark_synapseml_regression()
test_spark_synapseml_rank()
test_spark_input_df()
test_get_random_dataframe()
test_auto_convert_dtypes_pandas()
test_auto_convert_dtypes_spark()
# import cProfile
# import pstats

View File

@@ -25,7 +25,7 @@ os.environ["FLAML_MAX_CONCURRENT"] = "2"
spark_available, _ = check_spark()
skip_spark = not spark_available
pytestmark = pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests.")
pytestmark = [pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests."), pytest.mark.spark]
def test_parallel_xgboost(hpo_method=None, data_size=1000):

View File

@@ -1,6 +1,7 @@
import os
import unittest
import pytest
from sklearn.datasets import load_wine
from flaml import AutoML
@@ -24,6 +25,8 @@ if os.path.exists(os.path.join(os.getcwd(), "test", "spark", "custom_mylearner.p
else:
skip_my_learner = True
pytestmark = pytest.mark.spark
class TestEnsemble(unittest.TestCase):
def setUp(self) -> None:

View File

@@ -9,7 +9,7 @@ from flaml.tune.spark.utils import check_spark
spark_available, _ = check_spark()
skip_spark = not spark_available
pytestmark = pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests.")
pytestmark = [pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests."), pytest.mark.spark]
os.environ["FLAML_MAX_CONCURRENT"] = "2"
@@ -22,7 +22,7 @@ def base_automl(n_concurrent_trials=1, use_ray=False, use_spark=False, verbose=0
except (ServerError, Exception):
from sklearn.datasets import fetch_california_housing
X_train, y_train = fetch_california_housing(return_X_y=True)
X_train, y_train = fetch_california_housing(return_X_y=True, data_home="test")
automl = AutoML()
settings = {
"time_budget": 3, # total running time in seconds

View File

@@ -21,6 +21,7 @@ try:
from pyspark.ml.feature import VectorAssembler
except ImportError:
pass
pytestmark = pytest.mark.spark
warnings.filterwarnings("ignore")
skip_spark = importlib.util.find_spec("pyspark") is None

View File

@@ -2,6 +2,7 @@ import os
import unittest
import numpy as np
import pytest
import scipy.sparse
from sklearn.datasets import load_iris, load_wine
@@ -12,6 +13,7 @@ from flaml.tune.spark.utils import check_spark
spark_available, _ = check_spark()
skip_spark = not spark_available
pytestmark = pytest.mark.spark
os.environ["FLAML_MAX_CONCURRENT"] = "2"

View File

@@ -9,7 +9,7 @@ from flaml.tune.spark.utils import check_spark
spark_available, _ = check_spark()
skip_spark = not spark_available
pytestmark = pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests.")
pytestmark = [pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests."), pytest.mark.spark]
here = os.path.abspath(os.path.dirname(__file__))
os.environ["FLAML_MAX_CONCURRENT"] = "2"

View File

@@ -25,7 +25,7 @@ try:
except ImportError:
skip_spark = True
pytestmark = pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests.")
pytestmark = [pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests."), pytest.mark.spark]
def test_overtime():

View File

@@ -2,8 +2,23 @@ import os
import sys
import pytest
from minio.error import ServerError
from openml.exceptions import OpenMLServerException
try:
from minio.error import ServerError
except ImportError:
class ServerError(Exception):
pass
try:
from openml.exceptions import OpenMLServerException
except ImportError:
class OpenMLServerException(Exception):
pass
from requests.exceptions import ChunkedEncodingError, SSLError
from flaml.tune.spark.utils import check_spark
@@ -11,7 +26,7 @@ from flaml.tune.spark.utils import check_spark
spark_available, _ = check_spark()
skip_spark = not spark_available
pytestmark = pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests.")
pytestmark = [pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests."), pytest.mark.spark]
os.environ["FLAML_MAX_CONCURRENT"] = "2"

View File

@@ -14,7 +14,7 @@ from flaml.tune.spark.utils import check_spark
spark_available, _ = check_spark()
skip_spark = not spark_available
pytestmark = pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests.")
pytestmark = [pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests."), pytest.mark.spark]
os.environ["FLAML_MAX_CONCURRENT"] = "2"
X, y = load_breast_cancer(return_X_y=True)

View File

@@ -36,7 +36,7 @@ except ImportError:
print("Spark is not installed. Skip all spark tests.")
skip_spark = True
pytestmark = pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests.")
pytestmark = [pytest.mark.skipif(skip_spark, reason="Spark is not installed. Skip all spark tests."), pytest.mark.spark]
def test_with_parameters_spark():

View File

@@ -5,17 +5,38 @@ import sys
import unittest
import numpy as np
import openml
try:
import openml
except ImportError:
openml = None
import pandas as pd
import pytest
import scipy.sparse
from minio.error import ServerError
try:
from minio.error import ServerError
except ImportError:
class ServerError(Exception):
pass
from requests.exceptions import SSLError
from sklearn.metrics import mean_absolute_error, mean_squared_error
from flaml import AutoVW
from flaml.tune import loguniform, polynomial_expansion_set
try:
from vowpalwabbit import pyvw
except ImportError:
skip_vw_test = True
else:
skip_vw_test = False
if openml is None:
    pytest.skip("skipping if no openml", allow_module_level=True)
VW_DS_DIR = "test/data/"
NS_LIST = list(string.ascii_lowercase) + list(string.ascii_uppercase)
logger = logging.getLogger(__name__)
@@ -351,14 +372,9 @@ def get_vw_tuning_problem(tuning_hp="NamesapceInteraction"):
return vw_oml_problem_args, vw_online_aml_problem
@pytest.mark.skipif(
"3.10" in sys.version or "3.11" in sys.version,
reason="do not run on py >= 3.10",
)
@pytest.mark.skipif(skip_vw_test, reason="vowpalwabbit not installed")
class TestAutoVW(unittest.TestCase):
def test_vw_oml_problem_and_vanilla_vw(self):
from vowpalwabbit import pyvw
try:
vw_oml_problem_args, vw_online_aml_problem = get_vw_tuning_problem()
except (SSLError, ServerError, Exception) as e:

View File

@@ -59,6 +59,17 @@ def _test_hf_data():
except requests.exceptions.ConnectionError:
return
# Tests will only run if there is a GPU available
try:
import ray
pg = ray.util.placement_group([{"CPU": 1, "GPU": 1}])
if not pg.wait(timeout_seconds=10): # Wait 10 seconds for resources
raise RuntimeError("No available node types can fulfill resource request!")
except RuntimeError:
return
custom_sent_keys = ["sentence1", "sentence2"]
label_key = "label"

View File

@@ -6,12 +6,12 @@ from sklearn.model_selection import train_test_split
from flaml import tune
from flaml.automl.model import LGBMEstimator
data = fetch_california_housing(return_X_y=False, as_frame=True)
data = fetch_california_housing(return_X_y=False, as_frame=True, data_home="test")
df, X, y = data.frame, data.data, data.target
df_train, _, X_train, X_test, _, y_test = train_test_split(df, X, y, test_size=0.33, random_state=42)
csv_file_name = "test/housing.csv"
df_train.to_csv(csv_file_name, index=False)
# X, y = fetch_california_housing(return_X_y=True, as_frame=True)
# X, y = fetch_california_housing(return_X_y=True, as_frame=True, data_home="test")
# X_train, X_test, y_train, y_test = train_test_split(
# X, y, test_size=0.33, random_state=42
# )

View File

@@ -80,11 +80,38 @@ from flaml import AutoML
from sklearn.datasets import load_iris
X, y = load_iris(return_X_y=True)
automl = AutoML(settings={"time_budget": 3})
settings = {"time_budget": 3}
automl = AutoML(**settings)
automl.fit(X, y)
print(f"{automl.best_estimator=}")
print(f"{automl.best_config=}")
print(f"params for best estimator: {automl.model.config2params(automl.best_config)}")
```
If the automl instance is not accessible but you have the `best_config`, you can convert it to estimator parameters with the code below:
```python
from flaml.automl.task.factory import task_factory
task = "classification"
best_estimator = "rf"
best_config = {
"n_estimators": 15,
"max_features": 0.35807183923834934,
"max_leaves": 12,
"criterion": "gini",
}
model_class = task_factory(task).estimator_class_from_str(best_estimator)(task=task)
best_params = model_class.config2params(best_config)
```
Then you can use it to train the corresponding sklearn estimator directly:
```python
from sklearn.ensemble import RandomForestClassifier
model = RandomForestClassifier(**best_params)
model.fit(X, y)
```

View File

@@ -2,9 +2,9 @@
"name": "website",
"version": "0.0.0",
"private": true,
"resolutions" :{
"nth-check":"2.0.1",
"trim":"0.0.3",
"resolutions": {
"nth-check": "2.0.1",
"trim": "0.0.3",
"got": "11.8.5",
"node-forge": "1.3.0",
"minimatch": "3.0.5",
@@ -12,7 +12,7 @@
"eta": "2.0.0",
"@sideway/formula": "3.0.1",
"http-cache-semantics": "4.1.1"
},
},
"scripts": {
"docusaurus": "docusaurus",
"start": "docusaurus start",
@@ -33,13 +33,13 @@
"clsx": "^1.1.1",
"file-loader": "^6.2.0",
"hast-util-is-element": "1.1.0",
"minimatch": "3.0.5",
"react": "^17.0.1",
"react-dom": "^17.0.1",
"rehype-katex": "4",
"remark-math": "3",
"trim": "^0.0.3",
"url-loader": "^4.1.1",
"minimatch": "3.0.5"
"url-loader": "^4.1.1"
},
"browserslist": {
"production": [

View File

@@ -153,6 +153,15 @@
"@babel/highlight" "^7.23.4"
chalk "^2.4.2"
"@babel/code-frame@^7.26.2":
version "7.26.2"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.26.2.tgz#4b5fab97d33338eff916235055f0ebc21e573a85"
integrity sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==
dependencies:
"@babel/helper-validator-identifier" "^7.25.9"
js-tokens "^4.0.0"
picocolors "^1.0.0"
"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.0", "@babel/compat-data@^7.20.1":
version "7.20.1"
resolved "https://registry.npmmirror.com/@babel/compat-data/-/compat-data-7.20.1.tgz#f2e6ef7790d8c8dbf03d379502dcc246dcce0b30"
@@ -429,6 +438,11 @@
resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83"
integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==
"@babel/helper-string-parser@^7.25.9":
version "7.25.9"
resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz#1aabb72ee72ed35789b4bbcad3ca2862ce614e8c"
integrity sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==
"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1":
version "7.19.1"
resolved "https://registry.npmmirror.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2"
@@ -439,6 +453,11 @@
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0"
integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==
"@babel/helper-validator-identifier@^7.25.9":
version "7.25.9"
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz#24b64e2c3ec7cd3b3c547729b8d16871f22cbdc7"
integrity sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==
"@babel/helper-validator-option@^7.18.6":
version "7.18.6"
resolved "https://registry.npmmirror.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8"
@@ -455,13 +474,12 @@
"@babel/types" "^7.19.0"
"@babel/helpers@^7.12.5", "@babel/helpers@^7.20.1":
version "7.20.1"
resolved "https://registry.npmmirror.com/@babel/helpers/-/helpers-7.20.1.tgz#2ab7a0fcb0a03b5bf76629196ed63c2d7311f4c9"
integrity sha512-J77mUVaDTUJFZ5BpP6mMn6OIl3rEWymk2ZxDBQJUG3P+PbmyMcF3bYWvz0ma69Af1oobDqT/iAsvzhB58xhQUg==
version "7.26.10"
resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.26.10.tgz#6baea3cd62ec2d0c1068778d63cb1314f6637384"
integrity sha512-UPYc3SauzZ3JGgj87GgZ89JVdC5dj0AoetR5Bw6wj4niittNyFh6+eOGonYvJ1ao6B8lEa3Q3klS7ADZ53bc5g==
dependencies:
"@babel/template" "^7.18.10"
"@babel/traverse" "^7.20.1"
"@babel/types" "^7.20.0"
"@babel/template" "^7.26.9"
"@babel/types" "^7.26.10"
"@babel/highlight@^7.18.6":
version "7.18.6"
@@ -491,6 +509,13 @@
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b"
integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==
"@babel/parser@^7.26.9":
version "7.26.10"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.26.10.tgz#e9bdb82f14b97df6569b0b038edd436839c57749"
integrity sha512-6aQR2zGE/QFi8JpDLjUZEPYOs7+mhKXm86VaKFiLP35JQwQb6bwUE+XbvkH0EptsYhbNBSUGaUBLKqxH1xSgsA==
dependencies:
"@babel/types" "^7.26.10"
"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6":
version "7.18.6"
resolved "https://registry.npmmirror.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2"
@@ -1196,19 +1221,19 @@
"@babel/plugin-transform-typescript" "^7.18.6"
"@babel/runtime-corejs3@^7.15.4":
version "7.20.1"
resolved "https://registry.npmmirror.com/@babel/runtime-corejs3/-/runtime-corejs3-7.20.1.tgz#d0775a49bb5fba77e42cbb7276c9955c7b05af8d"
integrity sha512-CGulbEDcg/ND1Im7fUNRZdGXmX2MTWVVZacQi/6DiKE5HNwZ3aVTm5PV4lO8HHz0B2h8WQyvKKjbX5XgTtydsg==
version "7.26.10"
resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.26.10.tgz#5a3185ca2813f8de8ae68622572086edf5cf51f2"
integrity sha512-uITFQYO68pMEYR46AHgQoyBg7KPPJDAbGn4jUTIRgCFJIp88MIBUianVOplhZDEec07bp9zIyr4Kp0FCyQzmWg==
dependencies:
core-js-pure "^3.25.1"
regenerator-runtime "^0.13.10"
core-js-pure "^3.30.2"
regenerator-runtime "^0.14.0"
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.13", "@babel/runtime@^7.15.4", "@babel/runtime@^7.8.4":
version "7.20.1"
resolved "https://registry.npmmirror.com/@babel/runtime/-/runtime-7.20.1.tgz#1148bb33ab252b165a06698fde7576092a78b4a9"
integrity sha512-mrzLkl6U9YLF8qpqI7TB82PESyEGjm/0Ly91jG575eVxMMlb8fYfOXFZIJ8XfLrJZQbm7dlKry2bJmXBUEkdFg==
version "7.26.10"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.10.tgz#a07b4d8fa27af131a633d7b3524db803eb4764c2"
integrity sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==
dependencies:
regenerator-runtime "^0.13.10"
regenerator-runtime "^0.14.0"
"@babel/template@^7.12.7", "@babel/template@^7.18.10":
version "7.18.10"
@@ -1228,6 +1253,15 @@
"@babel/parser" "^7.22.15"
"@babel/types" "^7.22.15"
"@babel/template@^7.26.9":
version "7.26.9"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.26.9.tgz#4577ad3ddf43d194528cff4e1fa6b232fa609bb2"
integrity sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==
dependencies:
"@babel/code-frame" "^7.26.2"
"@babel/parser" "^7.26.9"
"@babel/types" "^7.26.9"
"@babel/traverse@^7.12.13", "@babel/traverse@^7.12.9", "@babel/traverse@^7.19.0", "@babel/traverse@^7.19.1", "@babel/traverse@^7.20.1":
version "7.23.6"
resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.6.tgz#b53526a2367a0dd6edc423637f3d2d0f2521abc5"
@@ -1262,6 +1296,14 @@
"@babel/helper-validator-identifier" "^7.22.20"
to-fast-properties "^2.0.0"
"@babel/types@^7.26.10", "@babel/types@^7.26.9":
version "7.26.10"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.26.10.tgz#396382f6335bd4feb65741eacfc808218f859259"
integrity sha512-emqcG3vHrpxUKTrxcblR36dcrcoRDvKmnL/dCL6ZsHaShW80qxCAcNhzQZrpeM765VzEos+xOi4s+r4IXzTwdQ==
dependencies:
"@babel/helper-string-parser" "^7.25.9"
"@babel/helper-validator-identifier" "^7.25.9"
"@docsearch/css@3.3.0":
version "3.3.0"
resolved "https://registry.npmmirror.com/@docsearch/css/-/css-3.3.0.tgz#d698e48302d12240d7c2f7452ccb2d2239a8cd80"
@@ -2592,9 +2634,9 @@ ajv@^8.0.0, ajv@^8.8.0:
uri-js "^4.2.2"
algoliasearch-helper@^3.5.5:
version "3.11.1"
resolved "https://registry.npmmirror.com/algoliasearch-helper/-/algoliasearch-helper-3.11.1.tgz#d83ab7f1a2a374440686ef7a144b3c288b01188a"
integrity sha512-mvsPN3eK4E0bZG0/WlWJjeqe/bUD2KOEVOl0GyL/TGXn6wcpZU8NOuztGHCUKXkyg5gq6YzUakVTmnmSSO5Yiw==
version "3.26.0"
resolved "https://registry.yarnpkg.com/algoliasearch-helper/-/algoliasearch-helper-3.26.0.tgz#d6e283396a9fc5bf944f365dc3b712570314363f"
integrity sha512-Rv2x3GXleQ3ygwhkhJubhhYGsICmShLAiqtUuJTUkr9uOCOXyF2E71LVT4XDnVffbknv8XgScP4U0Oxtgm+hIw==
dependencies:
"@algolia/events" "^4.0.1"
@@ -2863,9 +2905,9 @@ boxen@^5.0.0, boxen@^5.0.1:
wrap-ansi "^7.0.0"
brace-expansion@^1.1.7:
version "1.1.11"
resolved "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
version "1.1.12"
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.12.tgz#ab9b454466e5a8cc3a187beaad580412a9c5b843"
integrity sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==
dependencies:
balanced-match "^1.0.0"
concat-map "0.0.1"
@@ -2995,15 +3037,10 @@ caniuse-api@^3.0.0:
lodash.memoize "^4.1.2"
lodash.uniq "^4.5.0"
caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001426:
version "1.0.30001430"
resolved "https://registry.npmmirror.com/caniuse-lite/-/caniuse-lite-1.0.30001430.tgz#638a8ae00b5a8a97e66ff43733b2701f81b101fa"
integrity sha512-IB1BXTZKPDVPM7cnV4iaKaHxckvdr/3xtctB3f7Hmenx3qYBhGtTZ//7EllK66aKXW98Lx0+7Yr0kxBtIt3tzg==
caniuse-lite@^1.0.30001646:
version "1.0.30001657"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001657.tgz#29fd504bffca719d1c6b63a1f6f840be1973a660"
integrity sha512-DPbJAlP8/BAXy3IgiWmZKItubb3TYGP0WscQQlVGIfT4s/YlFYVuJgyOsQNP7rJRChx/qdMeLJQJP0Sgg2yjNA==
caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001426, caniuse-lite@^1.0.30001646:
version "1.0.30001718"
resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001718.tgz"
integrity sha512-AflseV1ahcSunK53NfEs9gFWgOEmzr0f+kaMFA4xiLZlr9Hzt7HxcSpIFcnNCUkz6R6dWKa54rUz3HUmI3nVcw==
ccount@^1.0.0, ccount@^1.0.3:
version "1.1.0"
@@ -3326,10 +3363,10 @@ core-js-compat@^3.25.1:
dependencies:
browserslist "^4.21.4"
core-js-pure@^3.25.1:
version "3.26.0"
resolved "https://registry.npmmirror.com/core-js-pure/-/core-js-pure-3.26.0.tgz#7ad8a5dd7d910756f3124374b50026e23265ca9a"
integrity sha512-LiN6fylpVBVwT8twhhluD9TzXmZQQsr2I2eIKtWNbZI1XMfBT7CV18itaN6RA7EtQd/SDdRx/wzvAShX2HvhQA==
core-js-pure@^3.30.2:
version "3.41.0"
resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.41.0.tgz#349fecad168d60807a31e83c99d73d786fe80811"
integrity sha512-71Gzp96T9YPk63aUvE5Q5qP+DryB4ZloUZPSOebGM88VNw8VNfvdA7z6kGA8iGOTEzAomsRidp4jXSmUIJsL+Q==
core-js@^3.18.0:
version "3.26.0"
@@ -3371,9 +3408,9 @@ cross-fetch@^3.1.5:
node-fetch "2.6.7"
cross-spawn@^7.0.3:
version "7.0.3"
resolved "https://registry.npmmirror.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
version "7.0.6"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
dependencies:
path-key "^3.1.0"
shebang-command "^2.0.0"
@@ -4830,9 +4867,9 @@ http-parser-js@>=0.5.1:
integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==
http-proxy-middleware@^2.0.3:
version "2.0.7"
resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.7.tgz#915f236d92ae98ef48278a95dedf17e991936ec6"
integrity sha512-fgVY8AV7qU7z/MmXJ/rxwbrtQH4jBQ9m7kp3llF0liB7glmFeVZFBepQb32T3y8n8k2+AEYuMPCpinYW+/CuRA==
version "2.0.9"
resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz#e9e63d68afaa4eee3d147f39149ab84c0c2815ef"
integrity sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==
dependencies:
"@types/http-proxy" "^1.17.8"
http-proxy "^1.18.1"
@@ -5709,9 +5746,9 @@ multicast-dns@^7.2.5:
thunky "^1.0.2"
nanoid@^3.3.6:
version "3.3.6"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c"
integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==
version "3.3.8"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf"
integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==
negotiator@0.6.3:
version "0.6.3"
@@ -6441,9 +6478,9 @@ prism-react-renderer@^1.2.1:
integrity sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg==
prismjs@^1.23.0:
version "1.29.0"
resolved "https://registry.npmmirror.com/prismjs/-/prismjs-1.29.0.tgz#f113555a8fa9b57c35e637bba27509dcf802dd12"
integrity sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==
version "1.30.0"
resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.30.0.tgz#d9709969d9d4e16403f6f348c63553b19f0975a9"
integrity sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==
process-nextick-args@~2.0.0:
version "2.0.1"
@@ -6816,10 +6853,10 @@ regenerate@^1.4.2:
resolved "https://registry.npmmirror.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a"
integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==
regenerator-runtime@^0.13.10:
version "0.13.10"
resolved "https://registry.npmmirror.com/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz#ed07b19616bcbec5da6274ebc75ae95634bfc2ee"
integrity sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw==
regenerator-runtime@^0.14.0:
version "0.14.1"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f"
integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==
regenerator-transform@^0.15.0:
version "0.15.0"
@@ -7272,14 +7309,7 @@ send@0.19.0:
range-parser "~1.2.1"
statuses "2.0.1"
serialize-javascript@^6.0.0:
version "6.0.0"
resolved "https://registry.npmmirror.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8"
integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==
dependencies:
randombytes "^2.1.0"
serialize-javascript@^6.0.1:
serialize-javascript@^6.0.0, serialize-javascript@^6.0.1:
version "6.0.2"
resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2"
integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==