diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 4de33c741cefa..98a82077cff0f 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -7,7 +7,6 @@ services: # image: mcr.microsoft.com/vscode/devcontainers/rust:bullseye environment: - DENO_PATH=/usr/local/cargo/bin/deno - - PYTHON_PATH=/usr/bin/python3 - NSJAIL_PATH=/bin/nsjail volumes: - .:/workspace:cached diff --git a/.github/workflows/backend-test.yml b/.github/workflows/backend-test.yml index 04668e96e0efc..1cbefbaf91e71 100644 --- a/.github/workflows/backend-test.yml +++ b/.github/workflows/backend-test.yml @@ -42,9 +42,6 @@ jobs: - uses: actions/setup-go@v2 with: go-version: 1.21.5 - - uses: actions/setup-python@v2 - with: - python-version: 3.11 - uses: oven-sh/setup-bun@v2 with: bun-version: 1.1.43 @@ -64,7 +61,7 @@ jobs: deno --version && bun -v && go version && python3 --version && SQLX_OFFLINE=true DATABASE_URL=postgres://postgres:changeme@localhost:5432/windmill - DISABLE_EMBEDDING=true RUST_LOG=info PYTHON_PATH=$(which python) + DISABLE_EMBEDDING=true RUST_LOG=info DENO_PATH=$(which deno) BUN_PATH=$(which bun) GO_PATH=$(which go) UV_PATH=$(which uv) cargo test --features enterprise,deno_core,license,python,rust,scoped_cache --all -- diff --git a/CHANGELOG.md b/CHANGELOG.md index 9afe269c8c58c..0721e6384fd27 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,36 @@ # Changelog +## [1.466.2](https://github.com/windmill-labs/windmill/compare/v1.466.1...v1.466.2) (2025-02-20) + + +### Bug Fixes + +* add proxy envs (http_proxy) to uv install ([affb0b4](https://github.com/windmill-labs/windmill/commit/affb0b4c720551f7f1c7fa5315e3b39e5580b732)) + +## [1.466.1](https://github.com/windmill-labs/windmill/compare/v1.466.0...v1.466.1) (2025-02-20) + + +### Bug Fixes + +* **cli:** improve cli dependency error clarity ([dcc0d35](https://github.com/windmill-labs/windmill/commit/dcc0d35e971ab3df6a0122dc881b968e8221f40f)) +* **cli:** improve dependency job error message (logs in result) ([2c67e84](https://github.com/windmill-labs/windmill/commit/2c67e84abe98a3c43972cf5555536104119c6527)) +* **cli:** improve flow cli dependency error clarity ([d5b3a04](https://github.com/windmill-labs/windmill/commit/d5b3a04b0ab5f003c4c512cc9ba74eb620a3afc1)) +* **python:** PYTHON_PATH overrides python from uv ([39c0dd3](https://github.com/windmill-labs/windmill/commit/39c0dd3736da0722c7e18d84183c0e9b06cf2839)) + +## [1.466.0](https://github.com/windmill-labs/windmill/compare/v1.465.0...v1.466.0) (2025-02-19) + + +### Features + +* add support for gemini ([#5235](https://github.com/windmill-labs/windmill/issues/5235)) ([35d5293](https://github.com/windmill-labs/windmill/commit/35d5293fba47d368e503e9781719e6e9ccc96713)) +* remove `pip` fallback option for python and ansible ([#5186](https://github.com/windmill-labs/windmill/issues/5186)) ([4ad654f](https://github.com/windmill-labs/windmill/commit/4ad654fcf0c603aefc5a9b5c41da1ffa24b99d2d)) + + +### Bug Fixes + +* **apps:** font-size of title text not screen dependent ([44a6a62](https://github.com/windmill-labs/windmill/commit/44a6a62fbe3a9cae79e2d7ab7efd119f559aa374)) +* improve app db explorer handling of always identity columns ([74c0a10](https://github.com/windmill-labs/windmill/commit/74c0a10c3a8a4848341456635f36c0c2061b7943)) + ## [1.465.0](https://github.com/windmill-labs/windmill/compare/v1.464.0...v1.465.0) (2025-02-18) diff --git a/README.md b/README.md index 3f0fef9ff6047..d8ec51b3b0798 100644 --- a/README.md +++ b/README.md @@ 
-330,7 +330,7 @@ you to have it being synced automatically everyday. | SLACK_SIGNING_SECRET | None | The signing secret of your Slack app. See [Slack documentation](https://api.slack.com/authentication/verifying-requests-from-slack) | Server | | COOKIE_DOMAIN | None | The domain of the cookie. If not set, the cookie will be set by the browser based on the full origin | Server | | DENO_PATH | /usr/bin/deno | The path to the deno binary. | Worker | -| PYTHON_PATH | /usr/local/bin/python3 | The path to the python binary. | Worker | +| PYTHON_PATH | | The path to the python binary if you do not want it managed by uv. | Worker | | GO_PATH | /usr/bin/go | The path to the go binary. | Worker | | GOPRIVATE | | The GOPRIVATE env variable to use private go modules | Worker | | GOPROXY | | The GOPROXY env variable to use | Worker | diff --git a/backend/.sqlx/query-08f288d2781d823e109a9e5b8848234ca7d1efeee9661f3901f298da375e73f7.json b/backend/.sqlx/query-08f288d2781d823e109a9e5b8848234ca7d1efeee9661f3901f298da375e73f7.json index 2be39fce267e6..4bcf3c6ce3cf3 100644 --- a/backend/.sqlx/query-08f288d2781d823e109a9e5b8848234ca7d1efeee9661f3901f298da375e73f7.json +++ b/backend/.sqlx/query-08f288d2781d823e109a9e5b8848234ca7d1efeee9661f3901f298da375e73f7.json @@ -130,28 +130,28 @@ }, { "ordinal": 25, - "name": "teams_command_script", - "type_info": "Text" + "name": "ai_models", + "type_info": "VarcharArray" }, { "ordinal": 26, - "name": "teams_team_id", - "type_info": "Text" + "name": "code_completion_model", + "type_info": "Varchar" }, { "ordinal": 27, - "name": "teams_team_name", + "name": "teams_command_script", "type_info": "Text" }, { "ordinal": 28, - "name": "ai_models", - "type_info": "VarcharArray" + "name": "teams_team_id", + "type_info": "Text" }, { "ordinal": 29, - "name": "code_completion_model", - "type_info": "Varchar" + "name": "teams_team_name", + "type_info": "Text" } ], "parameters": { @@ -185,10 +185,10 @@ true, true, true, + false, true, true, true, - false, true ] }, diff --git a/backend/.sqlx/query-24178c21aadc1aed90f31e9362c6505a642c8f04b883c278b07e7ef5956ce121.json b/backend/.sqlx/query-24178c21aadc1aed90f31e9362c6505a642c8f04b883c278b07e7ef5956ce121.json deleted file mode 100644 index d8cca020fec78..0000000000000 --- a/backend/.sqlx/query-24178c21aadc1aed90f31e9362c6505a642c8f04b883c278b07e7ef5956ce121.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT \n \n EXISTS(SELECT 1 FROM websocket_trigger WHERE workspace_id = $1) AS \"websocket_used!\", \n \n EXISTS(SELECT 1 FROM http_trigger WHERE workspace_id = $1) AS \"http_routes_used!\",\n EXISTS(SELECT 1 FROM kafka_trigger WHERE workspace_id = $1) as \"kafka_used!\",\n EXISTS(SELECT 1 FROM nats_trigger WHERE workspace_id = $1) as \"nats_used!\",\n EXISTS(SELECT 1 FROM postgres_trigger WHERE workspace_id = $1) AS \"postgres_used!\"\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "websocket_used!", - "type_info": "Bool" - }, - { - "ordinal": 1, - "name": "http_routes_used!", - "type_info": "Bool" - }, - { - "ordinal": 2, - "name": "kafka_used!", - "type_info": "Bool" - }, - { - "ordinal": 3, - "name": "nats_used!", - "type_info": "Bool" - }, - { - "ordinal": 4, - "name": "postgres_used!", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - null, - null, - null, - null, - null - ] - }, - "hash": "24178c21aadc1aed90f31e9362c6505a642c8f04b883c278b07e7ef5956ce121" -} diff --git
a/backend/.sqlx/query-4a804ee30bfe86c4e2c15a9f6511be5adf0dd22cb942fac64b439fb4e20df447.json b/backend/.sqlx/query-4a804ee30bfe86c4e2c15a9f6511be5adf0dd22cb942fac64b439fb4e20df447.json deleted file mode 100644 index 1c75a48938983..0000000000000 --- a/backend/.sqlx/query-4a804ee30bfe86c4e2c15a9f6511be5adf0dd22cb942fac64b439fb4e20df447.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO metrics (id, value) \n VALUES ('no_uv_usage_ansible', $1)\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "4a804ee30bfe86c4e2c15a9f6511be5adf0dd22cb942fac64b439fb4e20df447" -} diff --git a/backend/.sqlx/query-55cb03040bc2a8c53dd7fbb42bbdcc40f463cbc52d94ed9315cf9a547d4c89f2.json b/backend/.sqlx/query-55cb03040bc2a8c53dd7fbb42bbdcc40f463cbc52d94ed9315cf9a547d4c89f2.json index 9243288c9d321..14685a8bfa6ec 100644 --- a/backend/.sqlx/query-55cb03040bc2a8c53dd7fbb42bbdcc40f463cbc52d94ed9315cf9a547d4c89f2.json +++ b/backend/.sqlx/query-55cb03040bc2a8c53dd7fbb42bbdcc40f463cbc52d94ed9315cf9a547d4c89f2.json @@ -130,28 +130,28 @@ }, { "ordinal": 25, - "name": "teams_command_script", - "type_info": "Text" + "name": "ai_models", + "type_info": "VarcharArray" }, { "ordinal": 26, - "name": "teams_team_id", - "type_info": "Text" + "name": "code_completion_model", + "type_info": "Varchar" }, { "ordinal": 27, - "name": "teams_team_name", + "name": "teams_command_script", "type_info": "Text" }, { "ordinal": 28, - "name": "ai_models", - "type_info": "VarcharArray" + "name": "teams_team_id", + "type_info": "Text" }, { "ordinal": 29, - "name": "code_completion_model", - "type_info": "Varchar" + "name": "teams_team_name", + "type_info": "Text" } ], "parameters": { @@ -185,10 +185,10 @@ true, true, true, + false, true, true, true, - false, true ] }, diff --git a/backend/.sqlx/query-65c339164e7669360d231d70105849e72bdc197c17c0fc51777c1dc9267e2daf.json b/backend/.sqlx/query-65c339164e7669360d231d70105849e72bdc197c17c0fc51777c1dc9267e2daf.json deleted file mode 100644 index d52d46f1ddb7c..0000000000000 --- a/backend/.sqlx/query-65c339164e7669360d231d70105849e72bdc197c17c0fc51777c1dc9267e2daf.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE workspace_settings\n SET teams_command_script = NULL,\n teams_team_id = NULL,\n teams_team_name = NULL\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "65c339164e7669360d231d70105849e72bdc197c17c0fc51777c1dc9267e2daf" -} diff --git a/backend/.sqlx/query-81b06122c7a12a314d8905ba5c7c14aa7614f2610e79a8c7302eaa63fb74984d.json b/backend/.sqlx/query-81b06122c7a12a314d8905ba5c7c14aa7614f2610e79a8c7302eaa63fb74984d.json deleted file mode 100644 index 91718513bbb6b..0000000000000 --- a/backend/.sqlx/query-81b06122c7a12a314d8905ba5c7c14aa7614f2610e79a8c7302eaa63fb74984d.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE global_settings\n SET value = (\n SELECT COALESCE(jsonb_agg(elem), '[]'::jsonb)\n FROM jsonb_array_elements(value) AS elem\n WHERE NOT (elem ? 
'teams_channel')\n )\n WHERE name = 'critical_error_channels'\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "81b06122c7a12a314d8905ba5c7c14aa7614f2610e79a8c7302eaa63fb74984d" -} diff --git a/backend/.sqlx/query-ddf2eccb78a310ed00c7d8b9c3f05d394a7cbcf0038c72a78add5c7b02ef5927.json b/backend/.sqlx/query-ddf2eccb78a310ed00c7d8b9c3f05d394a7cbcf0038c72a78add5c7b02ef5927.json index 5bfff47576491..c2dfed73a2a83 100644 --- a/backend/.sqlx/query-ddf2eccb78a310ed00c7d8b9c3f05d394a7cbcf0038c72a78add5c7b02ef5927.json +++ b/backend/.sqlx/query-ddf2eccb78a310ed00c7d8b9c3f05d394a7cbcf0038c72a78add5c7b02ef5927.json @@ -15,7 +15,7 @@ ] }, "nullable": [ - true + null ] }, "hash": "ddf2eccb78a310ed00c7d8b9c3f05d394a7cbcf0038c72a78add5c7b02ef5927" diff --git a/backend/.sqlx/query-e565f3b2e51059f563d18a8a9442bcae9640cee7b936820cb46c011222a77ff0.json b/backend/.sqlx/query-e565f3b2e51059f563d18a8a9442bcae9640cee7b936820cb46c011222a77ff0.json deleted file mode 100644 index 4c2cd96ca9945..0000000000000 --- a/backend/.sqlx/query-e565f3b2e51059f563d18a8a9442bcae9640cee7b936820cb46c011222a77ff0.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "UPDATE global_settings SET value = $1 WHERE name = 'teams'", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "e565f3b2e51059f563d18a8a9442bcae9640cee7b936820cb46c011222a77ff0" -} diff --git a/backend/.sqlx/query-ed318070b26861fda2d591a4356fdbeb6c7fdc965be43bddb010fd8299af1286.json b/backend/.sqlx/query-ed318070b26861fda2d591a4356fdbeb6c7fdc965be43bddb010fd8299af1286.json deleted file mode 100644 index e18818dc3afea..0000000000000 --- a/backend/.sqlx/query-ed318070b26861fda2d591a4356fdbeb6c7fdc965be43bddb010fd8299af1286.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO metrics (id, value) \n VALUES ('no_uv_usage_py', $1)\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "ed318070b26861fda2d591a4356fdbeb6c7fdc965be43bddb010fd8299af1286" -} diff --git a/backend/.sqlx/query-fec6d5674dc6b5a6a0ece419c40508835affcb7679a48f2a443777e829bd1e74.json b/backend/.sqlx/query-fec6d5674dc6b5a6a0ece419c40508835affcb7679a48f2a443777e829bd1e74.json index 649ab85650b69..de2e819af209d 100644 --- a/backend/.sqlx/query-fec6d5674dc6b5a6a0ece419c40508835affcb7679a48f2a443777e829bd1e74.json +++ b/backend/.sqlx/query-fec6d5674dc6b5a6a0ece419c40508835affcb7679a48f2a443777e829bd1e74.json @@ -41,11 +41,11 @@ ] }, "nullable": [ - false, - false, - false, - false, - false, + true, + true, + true, + true, + true, true ] }, diff --git a/backend/Cargo.lock b/backend/Cargo.lock index e97f2b5ae74a6..4bf6819cf037b 100644 --- a/backend/Cargo.lock +++ b/backend/Cargo.lock @@ -2683,9 +2683,9 @@ dependencies = [ [[package]] name = "deno_media_type" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "600222d059ab31ff31182b3e12615df2134a9e01605836b78ad8df91ba39eab3" +checksum = "480223262efd08f96b3be5f0457c82bac7296e70dc4e7ef7350751f66293812c" dependencies = [ "data-url", "serde", @@ -4041,9 +4041,9 @@ dependencies = [ [[package]] name = "hickory-proto" -version = "0.24.3" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ad3d6d98c648ed628df039541a5577bee1a7c83e9e16fe3dbedeea4cdfeb971" +checksum = 
"92652067c9ce6f66ce53cc38d1169daa36e6e7eb7dd3b63b5103bd9d97117248" dependencies = [ "async-trait", "cfg-if", @@ -4066,9 +4066,9 @@ dependencies = [ [[package]] name = "hickory-resolver" -version = "0.24.3" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf287bde7b776e85d7188e6e5db7cf410a2f9531fe82817eb87feed034c8d14" +checksum = "cbb117a1ca520e111743ab2f6688eddee69db4e0ea242545a604dce8a66fd22e" dependencies = [ "cfg-if", "futures-util", @@ -5434,9 +5434,9 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dab59f8e050d5df8e4dd87d9206fb6f65a483e20ac9fda365ade4fab353196c" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" dependencies = [ "libc", "log", @@ -6819,7 +6819,7 @@ checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.1", - "zerocopy 0.8.18", + "zerocopy 0.8.20", ] [[package]] @@ -6877,7 +6877,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a88e0da7a2c97baa202165137c158d0a2e824ac465d13d81046727b34cb247d3" dependencies = [ "getrandom 0.3.1", - "zerocopy 0.8.18", + "zerocopy 0.8.20", ] [[package]] @@ -9849,7 +9849,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "winnow 0.7.2", + "winnow 0.7.3", ] [[package]] @@ -10367,9 +10367,9 @@ checksum = "2f322b60f6b9736017344fa0635d64be2f458fbc04eef65f6be22976dd1ffd5b" [[package]] name = "unicode-ident" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" [[package]] name = "unicode-normalization" @@ -10881,7 +10881,7 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windmill" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "axum", @@ -10924,7 +10924,7 @@ dependencies = [ [[package]] name = "windmill-api" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "argon2", @@ -11020,7 +11020,7 @@ dependencies = [ [[package]] name = "windmill-api-client" -version = "1.465.0" +version = "1.466.2" dependencies = [ "base64 0.22.1", "chrono", @@ -11038,7 +11038,7 @@ dependencies = [ [[package]] name = "windmill-audit" -version = "1.465.0" +version = "1.466.2" dependencies = [ "chrono", "serde", @@ -11051,7 +11051,7 @@ dependencies = [ [[package]] name = "windmill-autoscaling" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "serde", @@ -11065,7 +11065,7 @@ dependencies = [ [[package]] name = "windmill-common" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "async-stream", @@ -11124,7 +11124,7 @@ dependencies = [ [[package]] name = "windmill-git-sync" -version = "1.465.0" +version = "1.466.2" dependencies = [ "regex", "serde", @@ -11138,7 +11138,7 @@ dependencies = [ [[package]] name = "windmill-indexer" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "bytes", @@ -11161,7 +11161,7 @@ dependencies = [ [[package]] name = "windmill-macros" -version = "1.465.0" +version = "1.466.2" dependencies = [ "itertools 0.14.0", "lazy_static", @@ -11173,7 +11173,7 @@ dependencies = [ [[package]] name = "windmill-parser" -version = "1.465.0" +version = "1.466.2" dependencies = [ "convert_case 
0.6.0", "serde", @@ -11182,7 +11182,7 @@ dependencies = [ [[package]] name = "windmill-parser-bash" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "lazy_static", @@ -11194,7 +11194,7 @@ dependencies = [ [[package]] name = "windmill-parser-csharp" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "serde_json", @@ -11206,7 +11206,7 @@ dependencies = [ [[package]] name = "windmill-parser-go" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "gosyn", @@ -11218,7 +11218,7 @@ dependencies = [ [[package]] name = "windmill-parser-graphql" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "lazy_static", @@ -11230,7 +11230,7 @@ dependencies = [ [[package]] name = "windmill-parser-php" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "itertools 0.14.0", @@ -11241,7 +11241,7 @@ dependencies = [ [[package]] name = "windmill-parser-py" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "itertools 0.14.0", @@ -11252,7 +11252,7 @@ dependencies = [ [[package]] name = "windmill-parser-py-imports" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "async-recursion", @@ -11272,7 +11272,7 @@ dependencies = [ [[package]] name = "windmill-parser-rust" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "convert_case 0.6.0", @@ -11289,7 +11289,7 @@ dependencies = [ [[package]] name = "windmill-parser-sql" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "lazy_static", @@ -11301,7 +11301,7 @@ dependencies = [ [[package]] name = "windmill-parser-ts" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "lazy_static", @@ -11319,7 +11319,7 @@ dependencies = [ [[package]] name = "windmill-parser-wasm" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "getrandom 0.2.15", @@ -11341,7 +11341,7 @@ dependencies = [ [[package]] name = "windmill-parser-yaml" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "serde_json", @@ -11351,7 +11351,7 @@ dependencies = [ [[package]] name = "windmill-queue" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "async-recursion", @@ -11384,7 +11384,7 @@ dependencies = [ [[package]] name = "windmill-sql-datatype-parser-wasm" -version = "1.465.0" +version = "1.466.2" dependencies = [ "wasm-bindgen", "wasm-bindgen-test", @@ -11394,7 +11394,7 @@ dependencies = [ [[package]] name = "windmill-worker" -version = "1.465.0" +version = "1.466.2" dependencies = [ "anyhow", "async-recursion", @@ -11678,9 +11678,9 @@ dependencies = [ [[package]] name = "winnow" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59690dea168f2198d1a3b0cac23b8063efcd11012f10ae4698f284808c8ef603" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ "memchr", ] @@ -11808,11 +11808,11 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.18" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79386d31a42a4996e3336b0919ddb90f81112af416270cff95b5f5af22b839c2" +checksum = "dde3bb8c68a8f3f1ed4ac9221aad6b10cece3e60a8e2ea54a6a2dec806d0084c" dependencies = [ - "zerocopy-derive 0.8.18", + "zerocopy-derive 0.8.20", ] [[package]] @@ -11828,9 +11828,9 @@ dependencies = [ [[package]] name = "zerocopy-derive" -version = "0.8.18" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"76331675d372f91bf8d17e13afbd5fe639200b73d01f0fc748bb059f9cca2db7" +checksum = "eea57037071898bf96a6da35fd626f4f27e9cee3ead2a6c703cf09d472b2e700" dependencies = [ "proc-macro2", "quote", diff --git a/backend/Cargo.toml b/backend/Cargo.toml index 24fee331b965c..24fc5903b7e9a 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "windmill" -version = "1.465.0" +version = "1.466.2" authors.workspace = true edition.workspace = true @@ -30,7 +30,7 @@ members = [ ] [workspace.package] -version = "1.465.0" +version = "1.466.2" authors = ["Ruben Fiszel "] edition = "2021" diff --git a/backend/src/main.rs b/backend/src/main.rs index ab74895ca8bf6..afd7ea08f4233 100644 --- a/backend/src/main.rs +++ b/backend/src/main.rs @@ -65,9 +65,9 @@ use windmill_common::global_settings::OBJECT_STORE_CACHE_CONFIG_SETTING; use windmill_worker::{ get_hub_script_content_and_requirements, BUN_BUNDLE_CACHE_DIR, BUN_CACHE_DIR, CSHARP_CACHE_DIR, DENO_CACHE_DIR, DENO_CACHE_DIR_DEPS, DENO_CACHE_DIR_NPM, GO_BIN_CACHE_DIR, GO_CACHE_DIR, - LOCK_CACHE_DIR, PIP_CACHE_DIR, POWERSHELL_CACHE_DIR, PY310_CACHE_DIR, PY311_CACHE_DIR, - PY312_CACHE_DIR, PY313_CACHE_DIR, RUST_CACHE_DIR, TAR_PIP_CACHE_DIR, TAR_PY310_CACHE_DIR, - TAR_PY311_CACHE_DIR, TAR_PY312_CACHE_DIR, TAR_PY313_CACHE_DIR, UV_CACHE_DIR, + POWERSHELL_CACHE_DIR, PY310_CACHE_DIR, PY311_CACHE_DIR, PY312_CACHE_DIR, PY313_CACHE_DIR, + RUST_CACHE_DIR, TAR_PY310_CACHE_DIR, TAR_PY311_CACHE_DIR, TAR_PY312_CACHE_DIR, + TAR_PY313_CACHE_DIR, UV_CACHE_DIR, }; use crate::monitor::{ @@ -1041,10 +1041,8 @@ pub async fn run_workers( let mut handles = Vec::with_capacity(num_workers as usize); for x in [ - LOCK_CACHE_DIR, TMP_LOGS_DIR, UV_CACHE_DIR, - TAR_PIP_CACHE_DIR, DENO_CACHE_DIR, DENO_CACHE_DIR_DEPS, DENO_CACHE_DIR_NPM, @@ -1057,7 +1055,6 @@ pub async fn run_workers( TAR_PY311_CACHE_DIR, TAR_PY312_CACHE_DIR, TAR_PY313_CACHE_DIR, - PIP_CACHE_DIR, BUN_BUNDLE_CACHE_DIR, GO_CACHE_DIR, GO_BIN_CACHE_DIR, diff --git a/backend/tests/worker.rs b/backend/tests/worker.rs index e1f73f587175a..31fbeef7eaee8 100644 --- a/backend/tests/worker.rs +++ b/backend/tests/worker.rs @@ -1015,15 +1015,10 @@ fn spawn_test_worker( tokio::sync::broadcast::Sender<()>, tokio::task::JoinHandle<()>, ) { - for x in [ - windmill_worker::LOCK_CACHE_DIR, - windmill_worker::GO_BIN_CACHE_DIR, - ] { - std::fs::DirBuilder::new() - .recursive(true) - .create(x) - .expect("could not create initial worker dir"); - } + std::fs::DirBuilder::new() + .recursive(true) + .create(windmill_worker::GO_BIN_CACHE_DIR) + .expect("could not create initial worker dir"); let (tx, rx) = tokio::sync::broadcast::channel(1); let db = db.to_owned(); diff --git a/backend/windmill-api/openapi.yaml b/backend/windmill-api/openapi.yaml index f8be04f8db6f3..27c7bb4437b54 100644 --- a/backend/windmill-api/openapi.yaml +++ b/backend/windmill-api/openapi.yaml @@ -1,7 +1,7 @@ openapi: "3.0.3" info: - version: 1.465.0 + version: 1.466.2 title: Windmill API contact: @@ -11979,7 +11979,7 @@ components: AIProvider: type: string - enum: [openai, anthropic, mistral, deepseek, groq, openrouter, customai] + enum: [openai, anthropic, mistral, deepseek, googleai, groq, openrouter, customai] AIResource: type: object diff --git a/backend/windmill-api/src/ai.rs b/backend/windmill-api/src/ai.rs index d77943df9d529..caec56a393c1c 100644 --- a/backend/windmill-api/src/ai.rs +++ b/backend/windmill-api/src/ai.rs @@ -380,6 +380,7 @@ pub enum AIProvider { Anthropic, Mistral, DeepSeek, + GoogleAI, Groq, OpenRouter, CustomAI, @@ -389,6 
+390,9 @@ impl AIProvider { pub fn get_openai_compatible_base_url(&self) -> Result<Option<String>> { match self { AIProvider::DeepSeek => Ok(Some("https://api.deepseek.com/v1".to_string())), + AIProvider::GoogleAI => Ok(Some( + "https://generativelanguage.googleapis.com/v1beta/openai".to_string(), + )), AIProvider::Groq => Ok(Some("https://api.groq.com/openai/v1".to_string())), AIProvider::OpenRouter => Ok(Some("https://openrouter.ai/api/v1".to_string())), AIProvider::CustomAI => Ok(None), @@ -409,6 +413,7 @@ impl TryFrom<&str> for AIProvider { "groq" => Ok(AIProvider::Groq), "openrouter" => Ok(AIProvider::OpenRouter), "deepseek" => Ok(AIProvider::DeepSeek), + "googleai" => Ok(AIProvider::GoogleAI), "customai" => Ok(AIProvider::CustomAI), _ => Err(Error::BadRequest(format!("Invalid AI provider: {}", s))), } @@ -472,7 +477,9 @@ async fn proxy( .await?; if ai_resource.is_none() { - return Err(Error::internal_err("AI resource not configured".to_string())); + return Err(Error::internal_err( + "AI resource not configured".to_string(), + )); } let ai_resource = serde_json::from_value::<AIResource>(ai_resource.unwrap()) diff --git a/backend/windmill-common/src/ee.rs b/backend/windmill-common/src/ee.rs index e25d36bee1fe9..2a820475f77f2 100644 --- a/backend/windmill-common/src/ee.rs +++ b/backend/windmill-common/src/ee.rs @@ -29,6 +29,15 @@ pub async fn get_license_plan() -> LicensePlan { pub enum CriticalErrorChannel { Email { email: String }, Slack { slack_channel: String }, + Teams { teams_channel: TeamsChannel }, +} + +#[derive(Deserialize)] +pub struct TeamsChannel { + pub team_id: String, + pub team_name: String, + pub channel_id: String, + pub channel_name: String, } pub enum CriticalAlertKind { diff --git a/backend/windmill-common/src/worker.rs b/backend/windmill-common/src/worker.rs index eb5bd1d6f1c4b..f07226265a6fc 100644 --- a/backend/windmill-common/src/worker.rs +++ b/backend/windmill-common/src/worker.rs @@ -362,9 +362,6 @@ fn parse_file<T: FromStr>(path: &str) -> Option<T> { #[annotations("#")] pub struct PythonAnnotations { pub no_cache: bool, - pub no_uv: bool, - pub no_uv_install: bool, - pub no_uv_compile: bool, pub no_postinstall: bool, pub py310: bool, pub py311: bool, diff --git a/backend/windmill-worker/nsjail/download.py.pip.config.proto b/backend/windmill-worker/nsjail/download.py.pip.config.proto deleted file mode 100644 index 6e2a8a19740c1..0000000000000 --- a/backend/windmill-worker/nsjail/download.py.pip.config.proto +++ /dev/null @@ -1,86 +0,0 @@ -name: "python download pip" - -mode: ONCE -hostname: "python" -log_level: ERROR -time_limit: 900 - -rlimit_as: 2048 -rlimit_cpu: 1000 -rlimit_fsize: 1024 -rlimit_nofile: 64 - -envar: "HOME=/user" -envar: "LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH" - -cwd: "/tmp" - -clone_newnet: false -clone_newuser: {CLONE_NEWUSER} - -keep_caps: true -keep_env: true - -mount { - src: "/bin" - dst: "/bin" - is_bind: true -} - -mount { - src: "/lib" - dst: "/lib" - is_bind: true -} - -mount { - src: "/lib64" - dst: "/lib64" - is_bind: true - mandatory: false -} - -mount { - src: "/usr" - dst: "/usr" - is_bind: true -} - -mount { - src: "/etc" - dst: "/etc" - is_bind: true -} - -mount { - src: "/dev/null" - dst: "/dev/null" - is_bind: true - rw: true -} - -mount { - dst: "/tmp" - fstype: "tmpfs" - rw: true - options: "size=500000000" -} - - -mount { - src: "{WORKER_DIR}/download_deps.py.pip.sh" - dst: "/download_deps.sh" - is_bind: true -} - -mount { - src: "/dev/urandom" - dst: "/dev/urandom" - is_bind: true -} - -exec_bin { - path: "/bin/sh" - arg: "/download_deps.sh" -} -
diff --git a/backend/windmill-worker/nsjail/download_deps.py.pip.sh b/backend/windmill-worker/nsjail/download_deps.py.pip.sh deleted file mode 100755 index efd627483231e..0000000000000 --- a/backend/windmill-worker/nsjail/download_deps.py.pip.sh +++ /dev/null @@ -1,24 +0,0 @@ -#/bin/sh - -INDEX_URL_ARG=$([ -z "$INDEX_URL" ] && echo ""|| echo "--index-url $INDEX_URL" ) -EXTRA_INDEX_URL_ARG=$([ -z "$EXTRA_INDEX_URL" ] && echo ""|| echo "--extra-index-url $EXTRA_INDEX_URL" ) -TRUSTED_HOST_ARG=$([ -z "$TRUSTED_HOST" ] && echo "" || echo "--trusted-host $TRUSTED_HOST") - -if [ ! -z "$INDEX_URL" ] -then - echo "\$INDEX_URL is set to $INDEX_URL" -fi - -if [ ! -z "$EXTRA_INDEX_URL" ] -then - echo "\$EXTRA_INDEX_URL is set to $EXTRA_INDEX_URL" -fi - -if [ ! -z "$TRUSTED_HOST" ] -then - echo "\$TRUSTED_HOST is set to $TRUSTED_HOST" -fi - -CMD="/usr/local/bin/python3 -m pip install -v \"$REQ\" -I -t \"$TARGET\" --no-cache --no-color --no-deps --isolated --no-warn-conflicts --disable-pip-version-check $INDEX_URL_ARG $EXTRA_INDEX_URL_ARG $TRUSTED_HOST_ARG" -echo $CMD -eval $CMD diff --git a/backend/windmill-worker/src/ansible_executor.rs b/backend/windmill-worker/src/ansible_executor.rs index fee2eaa119d6c..b8a57ded16ea3 100644 --- a/backend/windmill-worker/src/ansible_executor.rs +++ b/backend/windmill-worker/src/ansible_executor.rs @@ -2,11 +2,7 @@ use std::{collections::HashMap, os::unix::fs::PermissionsExt, path::PathBuf, process::Stdio}; #[cfg(windows)] -use std::{ - collections::HashMap, - path::{Path, PathBuf}, - process::Stdio, -}; +use std::{collections::HashMap, path::PathBuf, process::Stdio}; use anyhow::anyhow; use itertools::Itertools; @@ -86,7 +82,6 @@ async fn handle_ansible_python_deps( &mut Some(occupancy_metrics), PyVersion::Py311, false, - false, ) .await .map_err(|e| { @@ -113,7 +108,6 @@ async fn handle_ansible_python_deps( worker_dir, &mut Some(occupancy_metrics), crate::python_executor::PyVersion::Py311, - false, ) .await?; additional_python_paths.append(&mut venv_path); diff --git a/backend/windmill-worker/src/global_cache.rs b/backend/windmill-worker/src/global_cache.rs index 8239ab77d4c77..3bc652d8bedb2 100644 --- a/backend/windmill-worker/src/global_cache.rs +++ b/backend/windmill-worker/src/global_cache.rs @@ -22,11 +22,10 @@ pub async fn build_tar_and_push( folder: String, // python_311 python_xyz: String, - no_uv: bool, ) -> error::Result<()> { use object_store::path::Path; - use crate::{TAR_PIP_CACHE_DIR, TAR_PYBASE_CACHE_DIR}; + use crate::TAR_PYBASE_CACHE_DIR; tracing::info!("Started building and pushing piptar {folder}"); let start = Instant::now(); @@ -34,11 +33,7 @@ pub async fn build_tar_and_push( // e.g. 
tiny==1.0.0 let folder_name = folder.split("/").last().unwrap(); - let prefix = if no_uv { - TAR_PIP_CACHE_DIR - } else { - &format!("{TAR_PYBASE_CACHE_DIR}/{}", python_xyz) - }; + let prefix = &format!("{TAR_PYBASE_CACHE_DIR}/{}", python_xyz); let tar_path = format!("{prefix}/{folder_name}_tar.tar",); let tar_file = std::fs::File::create(&tar_path)?; @@ -59,10 +54,7 @@ pub async fn build_tar_and_push( // })?; if let Err(e) = s3_client .put( - &Path::from(format!( - "/tar/{TARGET}/{}/{folder_name}.tar", - if no_uv { "pip" } else { &python_xyz } - )), + &Path::from(format!("/tar/{TARGET}/{python_xyz}/{folder_name}.tar")), std::fs::read(&tar_path)?.into(), ) .await @@ -92,7 +84,6 @@ pub async fn pull_from_tar( folder: String, // python_311 python_xyz: String, - no_uv: bool, ) -> error::Result<()> { use windmill_common::s3_helpers::attempt_fetch_bytes; @@ -102,10 +93,7 @@ pub async fn pull_from_tar( let start = Instant::now(); - let tar_path = format!( - "tar/{TARGET}/{}/{folder_name}.tar", - if no_uv { "pip".to_owned() } else { python_xyz } - ); + let tar_path = format!("tar/{TARGET}/{python_xyz}/{folder_name}.tar"); let bytes = attempt_fetch_bytes(client, &tar_path).await?; extract_tar(bytes, &folder).await.map_err(|e| { diff --git a/backend/windmill-worker/src/python_executor.rs b/backend/windmill-worker/src/python_executor.rs index 531c41887a909..3485114d8976d 100644 --- a/backend/windmill-worker/src/python_executor.rs +++ b/backend/windmill-worker/src/python_executor.rs @@ -39,8 +39,10 @@ use std::env::var; use windmill_queue::{append_logs, CanceledBy}; lazy_static::lazy_static! { - static ref PYTHON_PATH: String = - var("PYTHON_PATH").unwrap_or_else(|_| "/usr/local/bin/python3".to_string()); + static ref PYTHON_PATH: Option<String> = var("PYTHON_PATH").ok().map(|v| { + tracing::warn!("PYTHON_PATH is set to {}, so python will not be managed by uv and will stay static regardless of annotations and instance settings. NOT RECOMMENDED", v); + v + }); static ref UV_PATH: String = var("UV_PATH").unwrap_or_else(|_| "/usr/local/bin/uv".to_string()); @@ -48,31 +50,18 @@ lazy_static::lazy_static!
{ static ref PY_CONCURRENT_DOWNLOADS: usize = var("PY_CONCURRENT_DOWNLOADS").ok().map(|flag| flag.parse().unwrap_or(20)).unwrap_or(20); - static ref FLOCK_PATH: String = - var("FLOCK_PATH").unwrap_or_else(|_| "/usr/bin/flock".to_string()); static ref NON_ALPHANUM_CHAR: Regex = regex::Regex::new(r"[^0-9A-Za-z=.-]").unwrap(); static ref TRUSTED_HOST: Option<String> = var("PY_TRUSTED_HOST").ok().or(var("PIP_TRUSTED_HOST").ok()); static ref INDEX_CERT: Option<String> = var("PY_INDEX_CERT").ok().or(var("PIP_INDEX_CERT").ok()); static ref NATIVE_CERT: bool = var("PY_NATIVE_CERT").ok().or(var("UV_NATIVE_TLS").ok()).map(|flag| flag == "true").unwrap_or(false); - pub static ref USE_SYSTEM_PYTHON: bool = var("USE_SYSTEM_PYTHON") - .ok().map(|flag| flag == "true").unwrap_or(false); - - pub static ref USE_PIP_COMPILE: bool = var("USE_PIP_COMPILE") - .ok().map(|flag| flag == "true").unwrap_or(false); - - pub static ref USE_PIP_INSTALL: bool = var("USE_PIP_INSTALL") - .ok().map(|flag| flag == "true").unwrap_or(false); - static ref RELATIVE_IMPORT_REGEX: Regex = Regex::new(r#"(import|from)\s(((u|f)\.)|\.)"#).unwrap(); static ref EPHEMERAL_TOKEN_CMD: Option<String> = var("EPHEMERAL_TOKEN_CMD").ok(); } const NSJAIL_CONFIG_DOWNLOAD_PY_CONTENT: &str = include_str!("../nsjail/download.py.config.proto"); -const NSJAIL_CONFIG_DOWNLOAD_PY_CONTENT_FALLBACK: &str = - include_str!("../nsjail/download.py.pip.config.proto"); const NSJAIL_CONFIG_RUN_PYTHON3_CONTENT: &str = include_str!("../nsjail/run.python3.config.proto"); const RELATIVE_PYTHON_LOADER: &str = include_str!("../loader.py"); @@ -89,8 +78,8 @@ use crate::{ }, handle_child::handle_child, AuthedClientBackgroundTask, DISABLE_NSJAIL, DISABLE_NUSER, HOME_ENV, INSTANCE_PYTHON_VERSION, - LOCK_CACHE_DIR, NSJAIL_PATH, PATH_ENV, PIP_CACHE_DIR, PIP_EXTRA_INDEX_URL, PIP_INDEX_URL, - PROXY_ENVS, PY_INSTALL_DIR, TZ_ENV, UV_CACHE_DIR, + NSJAIL_PATH, PATH_ENV, PIP_EXTRA_INDEX_URL, PIP_INDEX_URL, PROXY_ENVS, PY_INSTALL_DIR, TZ_ENV, + UV_CACHE_DIR, }; // To change latest stable version: @@ -311,6 +300,7 @@ impl PyVersion { .env_clear() .env("HOME", HOME_ENV.to_string()) .env("PATH", PATH_ENV.to_string()) + .envs(PROXY_ENVS.clone()) .args(["python", "install", v, "--python-preference=only-managed"]) // TODO: Do we need these? .envs([("UV_PYTHON_INSTALL_DIR", PY_INSTALL_DIR)]) @@ -451,8 +441,6 @@ pub async fn uv_pip_compile( py_version: PyVersion, // Debug-only flag no_cache: bool, - // Fallback to pip-compile. Will be removed in future - mut no_uv: bool, ) -> error::Result<String> { let mut logs = String::new(); logs.push_str(&format!("\nresolving dependencies...")); @@ -495,19 +483,8 @@ pub async fn uv_pip_compile( #[cfg(feature = "enterprise")] let requirements = replace_pip_secret(db, w_id, &requirements, worker_name, job_id).await?; - let mut req_hash = format!("py-{}", calculate_hash(&requirements)); - - if no_uv || *USE_PIP_COMPILE { - logs.push_str(&format!("\nFallback to pip-compile (Deprecated!)")); - // Set no_uv if not setted - no_uv = true; - // Make sure that if we put #no_uv (switch to pip-compile) to python code or used `USE_PIP_COMPILE=true` variable. - // Windmill will recalculate lockfile using pip-compile and dont take potentially broken lockfile (generated by uv) from cache (our db). - // It will recalculate lockfile even if inputs have not been changed.
- req_hash.push_str("-no_uv"); - // Will be in format: - // py-000..000-no_uv - } + let req_hash = format!("py-{}", calculate_hash(&requirements)); + if !no_cache { if let Some(cached) = sqlx::query_scalar!( "SELECT lockfile FROM pip_resolution_cache WHERE hash = $1", @@ -530,76 +507,7 @@ pub async fn uv_pip_compile( write_file(job_dir, file, &requirements)?; - // Fallback pip-compile. Will be removed in future - if no_uv { - tracing::debug!("Fallback to pip-compile"); - - let mut args = vec![ - "-q", - "--no-header", - file, - "--resolver=backtracking", - "--strip-extras", - ]; - let mut pip_args = vec![]; - let pip_extra_index_url = PIP_EXTRA_INDEX_URL - .read() - .await - .clone() - .map(handle_ephemeral_token); - if let Some(url) = pip_extra_index_url.as_ref() { - url.split(",").for_each(|url| { - args.extend(["--extra-index-url", url]); - pip_args.push(format!("--extra-index-url {}", url)); - }); - args.push("--no-emit-index-url"); - } - let pip_index_url = PIP_INDEX_URL - .read() - .await - .clone() - .map(handle_ephemeral_token); - if let Some(url) = pip_index_url.as_ref() { - args.extend(["--index-url", url, "--no-emit-index-url"]); - pip_args.push(format!("--index-url {}", url)); - } - if let Some(host) = TRUSTED_HOST.as_ref() { - args.extend(["--trusted-host", host]); - } - if let Some(cert_path) = INDEX_CERT.as_ref() { - args.extend(["--cert", cert_path]); - } - let pip_args_str = pip_args.join(" "); - if pip_args.len() > 0 { - args.extend(["--pip-args", &pip_args_str]); - } - tracing::debug!("pip-compile args: {:?}", args); - - let mut child_cmd = Command::new("pip-compile"); - child_cmd - .current_dir(job_dir) - .args(args) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()); - let child_process = start_child_process(child_cmd, "pip-compile").await?; - append_logs(&job_id, &w_id, logs, db).await; - handle_child( - job_id, - db, - mem_peak, - canceled_by, - child_process, - false, - worker_name, - &w_id, - "pip-compile", - None, - false, - occupancy_metrics, - ) - .await - .map_err(|e| Error::ExecutionErr(format!("Lock file generation failed: {e:?}")))?; - } else { + { // Make sure we have python runtime installed py_version .get_python(job_id, mem_peak, db, worker_name, w_id, occupancy_metrics) .await?; @@ -868,6 +776,30 @@ fn copy_dir_recursively(src: &Path, dst: &Path) -> windmill_common::error::Resul Ok(()) } +async fn get_python_path( + py_version: PyVersion, + worker_name: &str, + job_id: &Uuid, + w_id: &str, + mem_peak: &mut i32, + db: &sqlx::Pool<Postgres>, + occupancy_metrics: &mut Option<&mut OccupancyMetrics>, +) -> windmill_common::error::Result<String> { + let python_path = if let Some(python_path) = PYTHON_PATH.clone() { + python_path + } else if let Some(python_path) = py_version + .get_python(&job_id, mem_peak, db, worker_name, w_id, occupancy_metrics) + .await? + { + python_path + } else { + return Err(Error::ExecutionErr(format!( + "uv could not manage python path. Please manage it manually by setting the PYTHON_PATH environment variable to your python binary path" + ))); + }; + Ok(python_path) +} + #[tracing::instrument(level = "trace", skip_all)] pub async fn handle_python_job( requirements_o: Option<&String>, @@ -904,25 +836,18 @@ pub async fn handle_python_job( ) .await?; - let PythonAnnotations { no_uv, no_postinstall, .. } = PythonAnnotations::parse(inner_content); + let PythonAnnotations { no_postinstall, ..
} = PythonAnnotations::parse(inner_content); tracing::debug!("Finished handling python dependencies"); - let python_path = if no_uv { - PYTHON_PATH.clone() - } else if let Some(python_path) = py_version - .get_python( - &job.id, - mem_peak, - db, - worker_name, - &job.workspace_id, - &mut Some(occupancy_metrics), - ) - .await? - { - python_path - } else { - PYTHON_PATH.clone() - }; + let python_path = get_python_path( + py_version, + worker_name, + &job.id, + &job.workspace_id, + mem_peak, + db, + &mut Some(occupancy_metrics), + ) + .await?; if !no_postinstall { if let Err(e) = postinstall(&mut additional_python_paths, job_dir, job, db).await { @@ -931,15 +856,7 @@ tracing::debug!("Finished deps postinstall stage"); } - if no_uv { - append_logs( - &job.id, - &job.workspace_id, - format!("\n\n--- SYSTEM PYTHON (Fallback) CODE EXECUTION ---\n",), - db, - ) - .await; - } else { + { append_logs( &job.id, &job.workspace_id, @@ -1525,7 +1442,6 @@ async fn handle_python_deps( occupancy_metrics, annotated_pyv.unwrap_or(instance_pyv), annotations.no_cache, - annotations.no_uv || annotations.no_uv_compile, ) .await .map_err(|e| { @@ -1587,7 +1503,6 @@ worker_dir, occupancy_metrics, final_version, - annotations.no_uv || annotations.no_uv_install, ) .await?; additional_python_paths.append(&mut venv_path); @@ -1611,7 +1526,6 @@ async fn spawn_uv_install( (pip_extra_index_url, pip_index_url): (Option<String>, Option<String>), // If none, it is system python py_path: Option<String>, - no_uv_install: bool, worker_dir: &str, ) -> Result { if !*DISABLE_NSJAIL { @@ -1653,15 +1567,12 @@ async fn spawn_uv_install( let _ = write_file( job_dir, &nsjail_proto, - &(if no_uv_install { - NSJAIL_CONFIG_DOWNLOAD_PY_CONTENT_FALLBACK - } else { - NSJAIL_CONFIG_DOWNLOAD_PY_CONTENT - }) - .replace("{WORKER_DIR}", worker_dir) - .replace("{PY_INSTALL_DIR}", &PY_INSTALL_DIR) - .replace("{TARGET_DIR}", &venv_p) - .replace("{CLONE_NEWUSER}", &(!*DISABLE_NUSER).to_string()), + NSJAIL_CONFIG_DOWNLOAD_PY_CONTENT + .replace("{WORKER_DIR}", worker_dir) + .replace("{PY_INSTALL_DIR}", &PY_INSTALL_DIR) + .replace("{TARGET_DIR}", &venv_p) + .replace("{CLONE_NEWUSER}", &(!*DISABLE_NUSER).to_string()) + .as_str(), )?; let mut nsjail_cmd = Command::new(NSJAIL_PATH.as_str()); @@ -1675,72 +1586,47 @@ .stderr(Stdio::piped()); start_child_process(nsjail_cmd, NSJAIL_PATH.as_str()).await } else { - let fssafe_req = NON_ALPHANUM_CHAR.replace_all(&req, "_").to_string(); #[cfg(unix)] - let req = if no_uv_install { - format!("'{}'", req) - } else { - req.to_owned() - }; + let req = req.to_owned(); #[cfg(windows)] let req = format!("{}", req); - let mut command_args = if no_uv_install { - vec![ - PYTHON_PATH.as_str(), - "-m", - "pip", - "install", - &req, - "-I", - "--no-deps", - "--no-color", - "--isolated", - "--no-warn-conflicts", - "--disable-pip-version-check", - "-t", - venv_p, - ] - } else { - vec![ - UV_PATH.as_str(), - "pip", - "install", - &req, - "--no-deps", - "--no-color", - // Prevent uv from discovering configuration files.
- "--no-config", - "--link-mode=copy", - "--system", - // Prefer main index over extra - // https://docs.astral.sh/uv/pip/compatibility/#packages-that-exist-on-multiple-indexes - // TODO: Use env variable that can be toggled from UI - "--index-strategy", - "unsafe-best-match", - "--target", - venv_p, - "--no-cache", - // If we invoke uv pip install, then we want to overwrite existing data - "--reinstall", - ] - }; + let mut command_args = vec![ + UV_PATH.as_str(), + "pip", + "install", + &req, + "--no-deps", + "--no-color", + // Prevent uv from discovering configuration files. + "--no-config", + "--link-mode=copy", + "--system", + // Prefer main index over extra + // https://docs.astral.sh/uv/pip/compatibility/#packages-that-exist-on-multiple-indexes + // TODO: Use env variable that can be toggled from UI + "--index-strategy", + "unsafe-best-match", + "--target", + venv_p, + "--no-cache", + // If we invoke uv pip install, then we want to overwrite existing data + "--reinstall", + ]; - if !no_uv_install { - if let Some(py_path) = py_path.as_ref() { - command_args.extend([ - "-p", - py_path.as_str(), - "--python-preference", - "only-managed", // - ]); - } else { - command_args.extend([ - "--python-preference", - "only-system", // - ]); - } + if let Some(py_path) = py_path.as_ref() { + command_args.extend([ + "-p", + py_path.as_str(), + "--python-preference", + "only-managed", // + ]); + } else { + command_args.extend([ + "--python-preference", + "only-system", // + ]); } if let Some(url) = pip_extra_index_url.as_ref() { @@ -1774,42 +1660,19 @@ async fn spawn_uv_install( #[cfg(unix)] { - if no_uv_install { - let mut flock_cmd = Command::new(FLOCK_PATH.as_str()); - flock_cmd - .env_clear() - .envs(PROXY_ENVS.clone()) - .envs(envs) - .args([ - "-x", - &format!( - "{}/{}-{}.lock", - LOCK_CACHE_DIR, - if no_uv_install { "pip" } else { "py311" }, - fssafe_req - ), - "--command", - &command_args.join(" "), - ]) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()); - start_child_process(flock_cmd, FLOCK_PATH.as_str()).await - } else { - let mut cmd = Command::new(command_args[0]); - cmd.env_clear() - .envs(PROXY_ENVS.clone()) - .envs(envs) - .args(&command_args[1..]) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()); - start_child_process(cmd, UV_PATH.as_str()).await - } + let mut cmd = Command::new(command_args[0]); + cmd.env_clear() + .envs(PROXY_ENVS.clone()) + .envs(envs) + .args(&command_args[1..]) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + start_child_process(cmd, UV_PATH.as_str()).await } #[cfg(windows)] { - let installer_path = if no_uv_install { command_args[0] } else { "uv" }; - let mut cmd: Command = Command::new(&installer_path); + let mut cmd: Command = Command::new("uv"); cmd.env_clear() .envs(envs) .envs(PROXY_ENVS.clone()) @@ -1822,7 +1685,7 @@ async fn spawn_uv_install( .args(&command_args[1..]) .stdout(Stdio::piped()) .stderr(Stdio::piped()); - start_child_process(cmd, installer_path).await + start_child_process(cmd, "uv").await } } } @@ -1853,8 +1716,6 @@ pub async fn handle_python_reqs( worker_dir: &str, _occupancy_metrics: &mut Option<&mut OccupancyMetrics>, py_version: PyVersion, - // TODO: Remove (Deprecated) - mut no_uv_install: bool, ) -> error::Result> { let worker_dir = worker_dir.to_string(); @@ -1906,19 +1767,10 @@ pub async fn handle_python_reqs( .await; // Drop lock, so next print success can fire } - no_uv_install |= *USE_PIP_INSTALL; - if no_uv_install { - append_logs(&job_id, w_id, "\nFallback to pip (Deprecated!)\n", db).await; - 
tracing::warn!("Fallback to pip"); - } // Parallelism level (N) - let parallel_limit = if no_uv_install { - 1 - } else { - // Semaphore will panic if value less then 1 - PY_CONCURRENT_DOWNLOADS.clamp(1, 30) - }; + let parallel_limit = // Semaphore will panic if value less then 1 + PY_CONCURRENT_DOWNLOADS.clamp(1, 30); tracing::info!( workspace_id = %w_id, @@ -1953,11 +1805,7 @@ pub async fn handle_python_reqs( if req.starts_with('#') || req.starts_with('-') || req.trim().is_empty() { continue; } - let py_prefix = if no_uv_install { - PIP_CACHE_DIR - } else { - &py_version.to_cache_dir() - }; + let py_prefix = &py_version.to_cache_dir(); let venv_p = format!( "{py_prefix}/{}", @@ -2097,13 +1945,7 @@ pub async fn handle_python_reqs( let mut req_tl = 0; if total_to_install > 0 { let mut logs = String::new(); - // Do we use UV? - if no_uv_install { - logs.push_str("\n\n--- PIP INSTALL ---\n"); - } else { - logs.push_str("\n\n--- UV PIP INSTALL ---\n"); - } - + logs.push_str("\n\n--- UV PIP INSTALL ---\n"); logs.push_str("\nTo be installed: \n\n"); for (req, _) in &req_with_penv { if req.len() > req_tl { @@ -2135,13 +1977,9 @@ pub async fn handle_python_reqs( let is_not_pro = !matches!(get_license_plan().await, LicensePlan::Pro); let total_time = std::time::Instant::now(); - let py_path = if no_uv_install { - None - } else { - py_version - .get_python(job_id, mem_peak, db, _worker_name, w_id, _occupancy_metrics) - .await? - }; + let py_path = py_version + .get_python(job_id, mem_peak, db, _worker_name, w_id, _occupancy_metrics) + .await?; let has_work = req_with_penv.len() > 0; for ((i, (req, venv_p)), mut kill_rx) in @@ -2196,7 +2034,7 @@ pub async fn handle_python_reqs( tokio::select! { // Cancel was called on the job _ = kill_rx.recv() => return Err(anyhow::anyhow!("S3 pull was canceled")), - pull = pull_from_tar(os, venv_p.clone(), py_version.to_cache_dir_top_level(), no_uv_install) => { + pull = pull_from_tar(os, venv_p.clone(), py_version.to_cache_dir_top_level()) => { if let Err(e) = pull { tracing::info!( workspace_id = %w_id, @@ -2242,7 +2080,6 @@ pub async fn handle_python_reqs( &job_dir, pip_indexes, py_path, - no_uv_install, &worker_dir ).await { Ok(r) => r, @@ -2348,7 +2185,7 @@ pub async fn handle_python_reqs( #[cfg(all(feature = "enterprise", feature = "parquet", unix))] if s3_push { if let Some(os) = OBJECT_STORE_CACHE_SETTINGS.read().await.clone() { - tokio::spawn(build_tar_and_push(os, venv_p.clone(), py_version.to_cache_dir_top_level(), no_uv_install)); + tokio::spawn(build_tar_and_push(os, venv_p.clone(), py_version.to_cache_dir_top_level())); } } @@ -2580,8 +2417,20 @@ for line in sys.stdin: base_internal_url.to_string(), ); proc_envs.insert("BASE_URL".to_string(), base_internal_url.to_string()); + + let py_version = PyVersion::from_instance_version().await; + let python_path = get_python_path( + py_version, + worker_name, + &Uuid::nil(), + w_id, + &mut mem_peak, + db, + &mut None, + ) + .await?; handle_dedicated_process( - &*PYTHON_PATH, + &python_path, job_dir, context_envs, envs, diff --git a/backend/windmill-worker/src/result_processor.rs b/backend/windmill-worker/src/result_processor.rs index 1b7ec63a1ca12..4b433c4546f87 100644 --- a/backend/windmill-worker/src/result_processor.rs +++ b/backend/windmill-worker/src/result_processor.rs @@ -324,7 +324,7 @@ pub async fn process_result( } } err @ _ => to_raw_value(&SerializedError { - message: format!("error during execution of the script:\n{err:#}",), + message: format!("execution error:\n{err:#}",), name: 
"ExecutionErr".to_string(), step_id: job.flow_step_id.clone(), exit_code: None, diff --git a/backend/windmill-worker/src/worker.rs b/backend/windmill-worker/src/worker.rs index 590b57227c19a..5e647dd7cee72 100644 --- a/backend/windmill-worker/src/worker.rs +++ b/backend/windmill-worker/src/worker.rs @@ -264,10 +264,6 @@ pub async fn create_token_for_owner( pub const ROOT_CACHE_NOMOUNT_DIR: &str = concatcp!(TMP_DIR, "/cache_nomount/"); -pub const LOCK_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "lock"); -// Used as fallback now -pub const PIP_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "pip"); - pub const PY310_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "python_310"); pub const PY311_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "python_311"); pub const PY312_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "python_312"); @@ -281,7 +277,6 @@ pub const TAR_PY313_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "tar/python_313" pub const UV_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "uv"); pub const PY_INSTALL_DIR: &str = concatcp!(ROOT_CACHE_DIR, "py_runtime"); pub const TAR_PYBASE_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "tar"); -pub const TAR_PIP_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "tar/pip"); pub const DENO_CACHE_DIR: &str = concatcp!(ROOT_CACHE_DIR, "deno"); pub const DENO_CACHE_DIR_DEPS: &str = concatcp!(ROOT_CACHE_DIR, "deno/deps"); pub const DENO_CACHE_DIR_NPM: &str = concatcp!(ROOT_CACHE_DIR, "deno/npm"); @@ -300,7 +295,6 @@ const NUM_SECS_PING: u64 = 5; const NUM_SECS_READINGS: u64 = 60; const INCLUDE_DEPS_PY_SH_CONTENT: &str = include_str!("../nsjail/download_deps.py.sh"); -const INCLUDE_DEPS_PY_SH_CONTENT_FALLBACK: &str = include_str!("../nsjail/download_deps.py.pip.sh"); pub const DEFAULT_CLOUD_TIMEOUT: u64 = 900; pub const DEFAULT_SELFHOSTED_TIMEOUT: u64 = 604800; // 7 days @@ -833,13 +827,6 @@ pub async fn run_worker( "download_deps.py.sh", INCLUDE_DEPS_PY_SH_CONTENT, ); - - // TODO: Remove (Deprecated) - let _ = write_file( - &worker_dir, - "download_deps.py.pip.sh", - INCLUDE_DEPS_PY_SH_CONTENT_FALLBACK, - ); } let mut last_ping = Instant::now() - Duration::from_secs(NUM_SECS_PING + 1); diff --git a/backend/windmill-worker/src/worker_lockfiles.rs b/backend/windmill-worker/src/worker_lockfiles.rs index 03ba19d2adeff..6ac4ad6aabc35 100644 --- a/backend/windmill-worker/src/worker_lockfiles.rs +++ b/backend/windmill-worker/src/worker_lockfiles.rs @@ -39,8 +39,7 @@ use crate::csharp_executor::generate_nuget_lockfile; use crate::php_executor::{composer_install, parse_php_imports}; #[cfg(feature = "python")] use crate::python_executor::{ - create_dependencies_dir, handle_python_reqs, uv_pip_compile, PyVersion, USE_PIP_COMPILE, - USE_PIP_INSTALL, + create_dependencies_dir, handle_python_reqs, uv_pip_compile, PyVersion, }; #[cfg(feature = "rust")] use crate::rust_executor::generate_cargo_lockfile; @@ -397,10 +396,19 @@ pub async fn handle_dependency_job( ) .execute(db) .await?; - Err(Error::ExecutionErr(format!("Error locking file: {error}")))? + Err(Error::ExecutionErr(format!( + "Error locking file: {error}\n\nlogs:\n{}", + remove_ansi_codes(&logs2) + )))? } } } +fn remove_ansi_codes(s: &str) -> String { + lazy_static::lazy_static! 
{ static ref ANSI_REGEX: regex::Regex = regex::Regex::new(r"\x1b\[[0-9;]*[a-zA-Z]").unwrap(); } ANSI_REGEX.replace_all(s, "").to_string() } async fn trigger_dependents_to_recompute_dependencies( w_id: &str, @@ -739,6 +747,11 @@ fn get_deployment_msg_and_parent_path_from_args( (deployment_message, parent_path) } +struct LockModuleError { + id: String, + error: Error, +} + async fn lock_modules<'c>( modules: Vec<FlowModule>, job: &QueuedJob, @@ -762,7 +775,9 @@ )> { let mut new_flow_modules = Vec::new(); let mut modified_ids = Vec::new(); + let mut errors = Vec::new(); for mut e in modules.into_iter() { + let id = e.id.clone(); let mut nmodified_ids = Vec::new(); let FlowModuleValue::RawScript { lock, @@ -1014,12 +1029,7 @@ } Err(error) => { // TODO: Record flow raw script error lock logs - tracing::warn!( - path = path, - language = ?language, - error = ?error, - "Failed to generate flow lock for raw script" - ); + errors.push(LockModuleError { id, error }); None } }; @@ -1038,6 +1048,28 @@ new_flow_modules.push(e); continue; } + if !errors.is_empty() { + let error_message = errors + .iter() + .map(|e| format!("{}: {}", e.id, e.error)) + .collect::<Vec<String>>() + .join("\n"); + let logs2 = sqlx::query_scalar!( + "SELECT logs FROM job_logs WHERE job_id = $1 AND workspace_id = $2", + &job.id, + &job.workspace_id + ) + .fetch_optional(db) + .await? + .flatten() + .unwrap_or_else(|| "no logs".to_string()); + + return Err(Error::ExecutionErr(format!( + "Error locking flow modules:\n{}\n\nlogs:\n{}", + error_message, + remove_ansi_codes(&logs2) + ))); + } Ok((new_flow_modules, tx, modified_ids)) } @@ -1602,8 +1634,6 @@ async fn python_dep( occupancy_metrics: &mut Option<&mut OccupancyMetrics>, annotated_pyv_numeric: Option<u32>, annotations: PythonAnnotations, - no_uv_compile: bool, - no_uv_install: bool, ) -> std::result::Result<String, Error> { create_dependencies_dir(job_dir).await; @@ -1634,7 +1664,6 @@ async fn python_dep( occupancy_metrics, final_version, annotations.no_cache, - no_uv_compile, ) .await; // install the dependencies to pre-fill the cache @@ -1651,7 +1680,6 @@ async fn python_dep( worker_dir, occupancy_metrics, final_version, - no_uv_install, ) .await; @@ -1716,21 +1744,6 @@ async fn capture_dependency_job( .await? .join("\n") }; - let PythonAnnotations { no_uv, no_uv_install, no_uv_compile, .. } = anns; - if no_uv || no_uv_install || no_uv_compile || *USE_PIP_COMPILE || *USE_PIP_INSTALL { - if let Err(e) = sqlx::query!( - r#" - INSERT INTO metrics (id, value) - VALUES ('no_uv_usage_py', $1) - "#, - serde_json::to_value("").map_err(to_anyhow)? - ) - .execute(db) - .await - { - tracing::error!("Error inserting no_uv_usage_py to db: {:?}", e); - } - } python_dep( reqs, @@ -1745,8 +1758,6 @@ &mut Some(occupancy_metrics), annotated_pyv_numeric, anns, - no_uv_compile | no_uv, - no_uv_install | no_uv, ) .await } @@ -1767,21 +1778,6 @@ let (_logs, reqs, _) = windmill_parser_yaml::parse_ansible_reqs(job_raw_code)?; let reqs = reqs.map(|r| r.python_reqs.join("\n")).unwrap_or_default(); - if *USE_PIP_COMPILE || *USE_PIP_INSTALL { - if let Err(e) = sqlx::query!( - r#" - INSERT INTO metrics (id, value) - VALUES ('no_uv_usage_ansible', $1) - "#, - serde_json::to_value("").map_err(to_anyhow)?
- ) - .execute(db) - .await - { - tracing::error!("Error inserting no_uv_usage_ansible to db: {:?}", e); - }; - } - python_dep( reqs, job_id, @@ -1795,8 +1791,6 @@ &mut Some(occupancy_metrics), None, PythonAnnotations::default(), - false, - false, ) .await } diff --git a/benchmarks/lib.ts b/benchmarks/lib.ts index 2ab25fbf4311d..69ee33f797150 100644 --- a/benchmarks/lib.ts +++ b/benchmarks/lib.ts @@ -2,7 +2,7 @@ import { sleep } from "https://deno.land/x/sleep@v1.2.1/mod.ts"; import * as windmill from "https://deno.land/x/windmill@v1.174.0/mod.ts"; import * as api from "https://deno.land/x/windmill@v1.174.0/windmill-api/index.ts"; -export const VERSION = "v1.465.0"; +export const VERSION = "v1.466.2"; export async function login(email: string, password: string): Promise<string> { return await windmill.UserService.login({ diff --git a/cli/main.ts b/cli/main.ts index 7bba208601a06..a07f0ec7d7960 100644 --- a/cli/main.ts +++ b/cli/main.ts @@ -62,7 +62,7 @@ export { // } // }); -export const VERSION = "1.465.0"; +export const VERSION = "1.466.2"; const command = new Command() .name("wmill") diff --git a/cli/metadata.ts b/cli/metadata.ts index d547cee29f8c8..8bfb21a7cc163 100644 --- a/cli/metadata.ts +++ b/cli/metadata.ts @@ -49,6 +49,13 @@ import { FlowFile, replaceInlineScripts } from "./flow.ts"; import { getIsWin } from "./main.ts"; import { FlowValue } from "./gen/types.gen.ts"; +export class LockfileGenerationError extends Error { + constructor(message: string) { + super(message); + this.name = 'LockfileGenerationError'; + } +} + export async function generateAllMetadata() { } function findClosestRawReqs( @@ -376,9 +383,16 @@ async function updateScriptLock( const response = JSON.parse(responseText); const lock = response.lock; if (lock === undefined) { - throw new Error( - `Failed to generate lockfile. Full response was: ${JSON.stringify( - response + if (response?.["error"]?.["message"]) { + throw new LockfileGenerationError( + `Failed to generate lockfile: ${response?.["error"]?.["message"]}` + ); + } + throw new LockfileGenerationError( + `Failed to generate lockfile: ${JSON.stringify( + response, + null, + 2 )}` ); } @@ -395,8 +409,11 @@ metadataContent.lock = ""; } } catch (e) { - throw new Error( - `Failed to generate lockfile.
@@ -427,8 +444,20 @@ export async function updateFlow(
   try {
     const res = (await rawResponse.json()) as
       | { updated_flow_value: any }
+      | { error: { message: string } }
       | undefined;
-    return res?.updated_flow_value;
+    if (rawResponse.status != 200) {
+      const msg = (res as any)?.["error"]?.["message"]
+      if (msg) {
+        throw new LockfileGenerationError(
+          `Failed to generate lockfile: ${msg}`
+        );
+      }
+      throw new LockfileGenerationError(
+        `Failed to generate lockfile: ${rawResponse.statusText}, ${responseText}`
+      );
+    }
+    return (res as any).updated_flow_value;
   } catch (e) {
     try {
       responseText = await rawResponse.text();
diff --git a/cli/script.ts b/cli/script.ts
index 6ab67212e8a0c..37ae635380a42 100644
--- a/cli/script.ts
+++ b/cli/script.ts
@@ -990,7 +990,7 @@ const command = new Command()
   .action(bootstrap as any)
   .command(
     "generate-metadata",
-    "re-generate the metadata file updating the lock and the script schema (for flows, use `wmill flow generate - locks`)"
+    "re-generate the metadata file updating the lock and the script schema (for flows, use `wmill flow generate-locks`)"
   )
   .arguments("[script:file]")
   .option("--yes", "Skip confirmation prompt")
diff --git a/flake.nix b/flake.nix
index 219a103cd9d98..6f4463e5ae28d 100644
--- a/flake.nix
+++ b/flake.nix
@@ -97,7 +97,6 @@
         REMOTE_LSP = "http://127.0.0.1:3001";
         RUSTC_WRAPPER = "${pkgs.sccache}/bin/sccache";
         DENO_PATH = "${pkgs.deno}/bin/deno";
-        PYTHON_PATH = "${pkgs.python3}/bin/python3";
         GO_PATH = "${pkgs.go}/bin/go";
         BUN_PATH = "${pkgs.bun}/bin/bun";
         UV_PATH = "${pkgs.uv}/bin/uv";
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 6ab4e0466aeb0..732063508f830 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "windmill-components",
-  "version": "1.465.0",
+  "version": "1.466.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "windmill-components",
-      "version": "1.465.0",
+      "version": "1.466.2",
       "license": "AGPL-3.0",
       "dependencies": {
         "@anthropic-ai/sdk": "^0.32.1",
diff --git a/frontend/package.json b/frontend/package.json
index b53161199f805..962e69968e25d 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,6 +1,6 @@
 {
   "name": "windmill-components",
-  "version": "1.465.0",
+  "version": "1.466.2",
   "scripts": {
     "dev": "vite dev",
     "build": "vite build",
diff --git a/frontend/src/lib/components/DisplayResult.svelte b/frontend/src/lib/components/DisplayResult.svelte
index 80f7ce999f692..b365bbdd96f3a 100644
--- a/frontend/src/lib/components/DisplayResult.svelte
+++ b/frontend/src/lib/components/DisplayResult.svelte
@@ -76,6 +76,7 @@
   | 'pdf'
   | undefined
 
+ let hasBigInt = false
 $: resultKind = inferResultKind(result)
 
 export let forceJson = false
@@ -168,6 +169,13 @@
   largeObject = size > DISPLAY_MAX_SIZE
 }
 
+ if (!largeObject) {
+   hasBigInt = checkIfHasBigInt(result)
+   if (hasBigInt) {
+     return 'json'
+   }
+ }
+
 if (Array.isArray(result)) {
   if (result.length === 0) {
     return 'json'
@@ -263,6 +271,21 @@
   }
 }
 
+ function checkIfHasBigInt(result: any) {
+   if (typeof result === 'number' && Number.isInteger(result) && !Number.isSafeInteger(result)) {
+     return true
+   }
+
+   if (Array.isArray(result)) {
+     return result.some(checkIfHasBigInt)
+   }
+
+   if (result && typeof result === 'object') {
+     return Object.values(result).some(checkIfHasBigInt)
+   }
+
+   return false
+ }
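`checkIfHasBigInt` recursively flags any integer outside JavaScript's safe range (where `Number.isSafeInteger` fails, i.e. beyond 2^53 − 1). Such values may already have been rounded by `JSON.parse`, so the component falls back to the raw JSON view rather than render a silently wrong value. A standalone illustration of the underlying precision issue (not part of the diff):

```ts
// JSON.parse represents integer literals as IEEE-754 doubles, so anything past
// Number.MAX_SAFE_INTEGER (2^53 - 1 = 9007199254740991) can round silently.
const parsed = JSON.parse('{"id": 9007199254740993}');

console.log(parsed.id);                       // 9007199254740992 -- already rounded
console.log(Number.isSafeInteger(parsed.id)); // false -> hasBigInt, force the JSON view
```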
 function contentOrRootString(obj: string | { filename: string; content: string } | undefined) {
   if (obj == undefined || obj == null) {
     return ''
   }
@@ -401,14 +424,17 @@
   />
 {/each}
-{:else if resultKind === 'nondisplayable'}
Non displayable object
{:else}
Non displayable object
+{:else} +
{#if result != undefined && length != undefined && largeObject != undefined}
+ > + {#if result != undefined && length != undefined && largeObject != undefined} +
+
{#if !hideAsJson && !['json', 's3object'].includes(resultKind ?? '') && typeof result === 'object'} {/if}
-
{#if !forceJson && resultKind === 'table-col'} +
+
+ {#if !forceJson && resultKind === 'table-col'} {@const data = 'table-col' in result ? result['table-col'] : result} {:else if !forceJson && resultKind === 'table-row'} @@ -779,7 +806,7 @@
- {:else if largeObject} + {:else if largeObject || hasBigInt} {#if result && typeof result === 'object' && 'file' in result} {:else} -
- Download {filename ? '' : 'as JSON'} - - {#if download_as_csv} - convertJsonToCsv(result)} - customText="Download as CSV" - /> - {/if} -
+ {#if largeObject} +
+ Download {filename ? '' : 'as JSON'} + + {#if download_as_csv} + convertJsonToCsv(result)} + customText="Download as CSV" + /> + {/if} +
-
- -
+
+ +
+ {/if} {#if result && result != 'WINDMILL_TOO_BIG'} {/if} diff --git a/frontend/src/lib/components/apps/components/display/dbtable/AppDbExplorer.svelte b/frontend/src/lib/components/apps/components/display/dbtable/AppDbExplorer.svelte index 511856b0537cd..9148ef6524fdd 100644 --- a/frontend/src/lib/components/apps/components/display/dbtable/AppDbExplorer.svelte +++ b/frontend/src/lib/components/apps/components/display/dbtable/AppDbExplorer.svelte @@ -702,7 +702,6 @@ - { - const shouldFilter = t.isidentity !== ColumnIdentity.Always && t?.hideInsert === true + const shouldFilter = t.isidentity === ColumnIdentity.Always || t?.hideInsert === true return !shouldFilter }) diff --git a/frontend/src/lib/components/apps/components/display/table/AppAggridExplorerTable.svelte b/frontend/src/lib/components/apps/components/display/table/AppAggridExplorerTable.svelte index 1dd3ba40e6036..4e30009c0b07e 100644 --- a/frontend/src/lib/components/apps/components/display/table/AppAggridExplorerTable.svelte +++ b/frontend/src/lib/components/apps/components/display/table/AppAggridExplorerTable.svelte @@ -17,7 +17,7 @@ import { Button } from '$lib/components/common' import { cellRendererFactory, defaultCellRenderer } from './utils' import { Download, Trash2 } from 'lucide-svelte' - import type { ColumnDef } from '../dbtable/utils' + import { ColumnIdentity, type ColumnDef } from '../dbtable/utils' import AppAggridTableActions from './AppAggridTableActions.svelte' import Popover from '$lib/components/Popover.svelte' @@ -269,6 +269,14 @@ columnDefs.forEach((colDef, index) => { let noField = !colDef.field || typeof colDef.field !== 'string' || colDef.field.trim() === '' + if ( + (colDef.isidentity === ColumnIdentity.ByDefault || + colDef.isidentity === ColumnIdentity.Always) && + colDef.hideInsert == undefined + ) { + colDef.hideInsert = true + } + // Check if 'field' property exists and is a non-empty string if (noField && !(colDef.children && Array.isArray(colDef.children))) { isValid = false diff --git a/frontend/src/lib/components/copilot/lib.ts b/frontend/src/lib/components/copilot/lib.ts index 69d5ac9b7d7b6..9d9a63f89d88c 100644 --- a/frontend/src/lib/components/copilot/lib.ts +++ b/frontend/src/lib/components/copilot/lib.ts @@ -32,11 +32,13 @@ import type { ChatCompletionRequest } from '@mistralai/mistralai/models/componen export const SUPPORTED_LANGUAGES = new Set(Object.keys(GEN_CONFIG.prompts)) +// need at least one model for each provider except customai export const AI_DEFAULT_MODELS: Record = { openai: ['gpt-4o', 'gpt-4o-mini'], anthropic: ['claude-3-5-sonnet-latest', 'claude-3-5-haiku-latest'], mistral: ['codestral-latest'], deepseek: ['deepseek-chat', 'deepseek-reasoner'], + googleai: ['gemini-1.5-pro', 'gemini-2.0-flash', 'gemini-1.5-flash'], groq: ['llama-3.3-70b-versatile', 'llama-3.1-8b-instant'], openrouter: ['meta-llama/llama-3.2-3b-instruct:free'], customai: [] @@ -45,7 +47,55 @@ export const AI_DEFAULT_MODELS: Record = { export const OPENAI_COMPATIBLE_BASE_URLS = { groq: 'https://api.groq.com/openai/v1', openrouter: 'https://openrouter.ai/api/v1', - deepseek: 'https://api.deepseek.com/v1' + deepseek: 'https://api.deepseek.com/v1', + googleai: 'https://generativelanguage.googleapis.com/v1beta/openai' +} as const + +function prepareOpenaiCompatibleMessages( + aiProvider: AIProvider, + messages: ChatCompletionMessageParam[] +) { + switch (aiProvider) { + case 'googleai': + // system messages are not supported by gemini + const systemMessage = messages.find((m) => m.role === 'system') + if 
(systemMessage) { + messages.shift() + const startMessages: ChatCompletionMessageParam[] = [ + { + role: 'user', + content: 'System prompt: ' + (systemMessage.content as string) + }, + { + role: 'assistant', + content: 'Understood' + } + ] + messages = [...startMessages, ...messages] + } + return messages + default: + return messages + } +} + +const DEFAULT_COMPLETION_CONFIG: ChatCompletionCreateParamsStreaming = { + model: '', + max_tokens: 8192, //TODO: make this dynamic + temperature: 0, + seed: 42, + stream: true, + messages: [] +} + +export const OPENAI_COMPATIBLE_COMPLETION_CONFIG = { + groq: DEFAULT_COMPLETION_CONFIG, + openrouter: DEFAULT_COMPLETION_CONFIG, + deepseek: DEFAULT_COMPLETION_CONFIG, + googleai: { + ...DEFAULT_COMPLETION_CONFIG, + seed: undefined // not supported by gemini + } as ChatCompletionCreateParamsStreaming } as const class WorkspacedAIClients { @@ -115,15 +165,6 @@ class WorkspacedAIClients { export const workspaceAIClients = new WorkspacedAIClients() -const DEFAULT_COMPLETION_CONFIG: ChatCompletionCreateParamsStreaming = { - model: '', - max_tokens: 8000, //TODO: make this dynamic - temperature: 0, - seed: 42, - stream: true, - messages: [] -} - namespace MistralAI { export const mistralConfig: ChatCompletionRequest = { temperature: 0, @@ -510,10 +551,18 @@ export async function getNonStreamingCompletion( dangerouslyAllowBrowser: true }) : workspaceAIClients.getOpenaiClient() + const config = + aiProvider === 'openai' + ? OpenAi.openaiConfig + : OPENAI_COMPATIBLE_COMPLETION_CONFIG[aiProvider] + if (!config) { + throw new Error('No config for this provider: ' + aiProvider) + } + const processedMessages = prepareOpenaiCompatibleMessages(aiProvider, messages) const completion = await openaiClient.chat.completions.create( { - ...(aiProvider === 'openai' ? OpenAi.openaiConfig : DEFAULT_COMPLETION_CONFIG), - messages, + ...config, + messages: processedMessages, model, stream: false }, @@ -578,11 +627,19 @@ export async function getCompletion( } default: { const openaiClient = workspaceAIClients.getOpenaiClient() + const config: ChatCompletionCreateParamsStreaming = + aiProvider === 'openai' + ? OpenAi.openaiConfig + : OPENAI_COMPATIBLE_COMPLETION_CONFIG[aiProvider] + if (!config) { + throw new Error('No config for this provider: ' + aiProvider) + } + const processedMessages = prepareOpenaiCompatibleMessages(aiProvider, messages) const completion = await openaiClient.chat.completions.create( { - ...(aiProvider === 'openai' ? 
OpenAi.openaiConfig : DEFAULT_COMPLETION_CONFIG), + ...config, model, - messages + messages: processedMessages }, { signal: abortController.signal diff --git a/frontend/src/lib/components/propertyPicker/ObjectViewer.svelte b/frontend/src/lib/components/propertyPicker/ObjectViewer.svelte index 31cb5d554a7b6..66ebd350e80c6 100644 --- a/frontend/src/lib/components/propertyPicker/ObjectViewer.svelte +++ b/frontend/src/lib/components/propertyPicker/ObjectViewer.svelte @@ -6,10 +6,11 @@ import { NEVER_TESTED_THIS_FAR } from '../flows/models' import Portal from '$lib/components/Portal.svelte' import { Button } from '$lib/components/common' - import { Download, PanelRightOpen } from 'lucide-svelte' + import { Download, PanelRightOpen, TriangleAlertIcon } from 'lucide-svelte' import S3FilePicker from '../S3FilePicker.svelte' import { workspaceStore } from '$lib/stores' import AnimatedButton from '$lib/components/common/button/AnimatedButton.svelte' + import Popover from '../Popover.svelte' export let json: any export let level = 0 @@ -166,6 +167,17 @@ null {:else if typeof json[key] == 'string'} "{truncate(json[key], 200)}" + {:else if typeof json[key] == 'number' && Number.isInteger(json[key]) && !Number.isSafeInteger(json[key])} + + {truncate(JSON.stringify(json[key]), 200)} + + + + This number is too large for the frontend to handle correctly and may be + rounded. + + + {:else} {truncate(JSON.stringify(json[key]), 200)} diff --git a/frontend/src/lib/components/wizards/DBExplorerWizard.svelte b/frontend/src/lib/components/wizards/DBExplorerWizard.svelte index ff8ab10505aa3..42583500604a3 100644 --- a/frontend/src/lib/components/wizards/DBExplorerWizard.svelte +++ b/frontend/src/lib/components/wizards/DBExplorerWizard.svelte @@ -74,14 +74,15 @@ function computeWarning(columnMetadata, value) { if (columnMetadata?.isnullable === 'NO' && !columnMetadata?.defaultvalue) { - if ([ColumnIdentity.Always, ColumnIdentity.ByDefault].includes(columnMetadata?.isidentity)) { + if ([ColumnIdentity.ByDefault].includes(columnMetadata?.isidentity)) { return { type: 'info' as AlertType, title: 'Value will be generated', - message: 'The column is an identity column. The value will be generated by the database.' + message: + 'The column is an identity column. The value will be generated by the database unless a default is provided.' } } - if (value?.hideInsert) { + if (value?.hideInsert && columnMetadata?.isidentity !== ColumnIdentity.Always) { return { type: 'warning' as AlertType, title: 'No default value', @@ -173,6 +174,7 @@ { e?.stopPropagation() }} @@ -181,6 +183,11 @@ /> + {#if value?.isidentity === ColumnIdentity.Always} + + This column is an ALWAYS identity column and so can't be provided by the user. 
+ + {/if} {#if warning} diff --git a/frontend/src/lib/script_helpers.ts b/frontend/src/lib/script_helpers.ts index 5065ba91f10d4..1ce65316dacc1 100644 --- a/frontend/src/lib/script_helpers.ts +++ b/frontend/src/lib/script_helpers.ts @@ -911,14 +911,13 @@ export const INITIAL_CODE = { } export function isInitialCode(content: string): boolean { - Object.values(INITIAL_CODE).forEach((lang) => { - Object.values(lang).forEach((code) => { + for (const lang of Object.values(INITIAL_CODE)) { + for (const code of Object.values(lang)) { if (content === code) { return true } - }) - }) - + } + } return false } diff --git a/frontend/src/routes/(root)/(logged)/workspace_settings/+page.svelte b/frontend/src/routes/(root)/(logged)/workspace_settings/+page.svelte index 1a060debfb3b0..35d26d6830a41 100644 --- a/frontend/src/routes/(root)/(logged)/workspace_settings/+page.svelte +++ b/frontend/src/routes/(root)/(logged)/workspace_settings/+page.svelte @@ -1074,6 +1074,7 @@ +
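The `isInitialCode` rewrite above fixes a classic pitfall: `return` inside a `forEach` callback only exits that callback, so the original version always fell through to `return false`. A minimal standalone reproduction (names are illustrative, not from the diff):

```ts
// Buggy shape: the inner `return true` is swallowed by the forEach callback,
// so the outer function always reaches its final `return false`.
function containsBroken(groups: string[][], needle: string): boolean {
  groups.forEach((group) => {
    group.forEach((item) => {
      if (item === needle) {
        return true; // returns from the arrow function, not from containsBroken
      }
    });
  });
  return false;
}

// Fixed shape, as in the diff: `for...of` lets `return` exit the enclosing function.
function contains(groups: string[][], needle: string): boolean {
  for (const group of groups) {
    for (const item of group) {
      if (item === needle) {
        return true;
      }
    }
  }
  return false;
}

console.log(containsBroken([["a"], ["b"]], "b")); // false (the bug)
console.log(contains([["a"], ["b"]], "b"));       // true
```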