From cf760567d9adf9ba4ff06e0d6fa9cffb89d85499 Mon Sep 17 00:00:00 2001 From: Sandeep Kumta Vishnu Date: Sat, 21 Feb 2026 17:51:46 -0800 Subject: [PATCH 1/8] feat: add `snow streamlit logs` command for live log streaming Add a new `snow streamlit logs` subcommand that connects to a deployed Streamlit app's developer log service via WebSocket and streams log entries to the terminal in real time. The command reads the app name from snowflake.yml (matching the `snow streamlit deploy` pattern) and supports both human-readable (default) and JSONL (`--format json`) output formats. Key changes: - Copy logs_service.proto and generate Python protobuf bindings - Add log_streaming.py with WebSocket connection, token auth, and streaming loop (ping/pong disabled for compatibility) - Add `logs` subcommand to streamlit commands with --tail and --format - Add protobuf and websockets dependencies to pyproject.toml - SQL injection guard on FQN interpolation --- pylock.toml | 67 +++++ pyproject.toml | 2 + snyk/requirements.txt | 1 + .../cli/_plugins/streamlit/commands.py | 56 +++++ .../cli/_plugins/streamlit/log_streaming.py | 228 ++++++++++++++++++ .../cli/_plugins/streamlit/proto/__init__.py | 0 .../proto/developer/v1/logs_service.proto | 40 +++ .../streamlit/proto/generated/__init__.py | 0 .../proto/generated/developer/__init__.py | 0 .../proto/generated/developer/v1/__init__.py | 0 .../developer/v1/logs_service_pb2.py | 43 ++++ 11 files changed, 437 insertions(+) create mode 100644 src/snowflake/cli/_plugins/streamlit/log_streaming.py create mode 100644 src/snowflake/cli/_plugins/streamlit/proto/__init__.py create mode 100644 src/snowflake/cli/_plugins/streamlit/proto/developer/v1/logs_service.proto create mode 100644 src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py create mode 100644 src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py create mode 100644 src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py create mode 
100644 src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py diff --git a/pylock.toml b/pylock.toml index 8c6438fa2d..ab445f4e6f 100644 --- a/pylock.toml +++ b/pylock.toml @@ -877,6 +877,73 @@ version = "0.2.13" sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", upload-time = 2024-01-06T02:10:57Z, size = 101301, hashes = { sha256 = "72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5" } } wheels = [{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", upload-time = 2024-01-06T02:10:55Z, size = 34166, hashes = { sha256 = "3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859" } }] +[[packages]] +name = "websockets" +version = "16.0" +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", upload-time = 2026-01-10T09:23:47Z, size = 179346, hashes = { sha256 = "5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5" } } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/74/221f58decd852f4b59cc3354cccaf87e8ef695fede361d03dc9a7396573b/websockets-16.0-cp310-cp310-macosx_10_9_universal2.whl", upload-time = 2026-01-10T09:22:21Z, size = 177343, hashes = { sha256 = "04cdd5d2d1dacbad0a7bf36ccbcd3ccd5a30ee188f2560b7a62a30d14107b31a" } }, + { url = "https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl", upload-time = 2026-01-10T09:22:22Z, size = 175021, hashes = { sha256 = "8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0" } }, + { url = "https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl", 
upload-time = 2026-01-10T09:22:23Z, size = 175320, hashes = { sha256 = "583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957" } }, + { url = "https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:22:25Z, size = 183815, hashes = { sha256 = "7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72" } }, + { url = "https://files.pythonhosted.org/packages/86/26/d40eaa2a46d4302becec8d15b0fc5e45bdde05191e7628405a19cf491ccd/websockets-16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:22:27Z, size = 185054, hashes = { sha256 = "df57afc692e517a85e65b72e165356ed1df12386ecb879ad5693be08fac65dde" } }, + { url = "https://files.pythonhosted.org/packages/b0/ba/6500a0efc94f7373ee8fefa8c271acdfd4dca8bd49a90d4be7ccabfc397e/websockets-16.0-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:22:28Z, size = 184565, hashes = { sha256 = "2b9f1e0d69bc60a4a87349d50c09a037a2607918746f07de04df9e43252c77a3" } }, + { url = "https://files.pythonhosted.org/packages/04/b4/96bf2cee7c8d8102389374a2616200574f5f01128d1082f44102140344cc/websockets-16.0-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:22:30Z, size = 183848, hashes = { sha256 = "335c23addf3d5e6a8633f9f8eda77efad001671e80b95c491dd0924587ece0b3" } }, + { url = "https://files.pythonhosted.org/packages/02/8e/81f40fb00fd125357814e8c3025738fc4ffc3da4b6b4a4472a82ba304b41/websockets-16.0-cp310-cp310-win32.whl", upload-time = 2026-01-10T09:22:32Z, size = 178249, hashes = { sha256 = "37b31c1623c6605e4c00d466c9d633f9b812ea430c11c8a278774a1fde1acfa9" } }, + { url = "https://files.pythonhosted.org/packages/b4/5f/7e40efe8df57db9b91c88a43690ac66f7b7aa73a11aa6a66b927e44f26fa/websockets-16.0-cp310-cp310-win_amd64.whl", upload-time = 2026-01-10T09:22:33Z, size 
= 178685, hashes = { sha256 = "8e1dab317b6e77424356e11e99a432b7cb2f3ec8c5ab4dabbcee6add48f72b35" } }, + { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", upload-time = 2026-01-10T09:22:34Z, size = 177340, hashes = { sha256 = "31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8" } }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", upload-time = 2026-01-10T09:22:36Z, size = 175022, hashes = { sha256 = "417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad" } }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:22:37Z, size = 175319, hashes = { sha256 = "af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d" } }, + { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:22:38Z, size = 184631, hashes = { sha256 = "08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe" } }, + { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:22:39Z, size = 185870, hashes = { sha256 = "7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b" } }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:22:41Z, size = 185361, hashes = 
{ sha256 = "d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5" } }, + { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:22:42Z, size = 184615, hashes = { sha256 = "1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64" } }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", upload-time = 2026-01-10T09:22:43Z, size = 178246, hashes = { sha256 = "5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6" } }, + { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", upload-time = 2026-01-10T09:22:44Z, size = 178684, hashes = { sha256 = "8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac" } }, + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", upload-time = 2026-01-10T09:22:46Z, size = 177365, hashes = { sha256 = "71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00" } }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", upload-time = 2026-01-10T09:22:47Z, size = 175038, hashes = { sha256 = "8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79" } }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:22:49Z, size = 175328, hashes = { sha256 = "86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39" } }, + { url = 
"https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:22:51Z, size = 184915, hashes = { sha256 = "9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c" } }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:22:52Z, size = 186152, hashes = { sha256 = "e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f" } }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:22:53Z, size = 185583, hashes = { sha256 = "a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1" } }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:22:55Z, size = 184880, hashes = { sha256 = "485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2" } }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", upload-time = 2026-01-10T09:22:56Z, size = 178261, hashes = { sha256 = "eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89" } }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", upload-time = 2026-01-10T09:22:57Z, size = 178693, hashes = { sha256 = "5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea" } }, + { url = 
"https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", upload-time = 2026-01-10T09:22:59Z, size = 177364, hashes = { sha256 = "878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9" } }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", upload-time = 2026-01-10T09:23:01Z, size = 175039, hashes = { sha256 = "52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230" } }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:23:02Z, size = 175323, hashes = { sha256 = "e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c" } }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:23:03Z, size = 184975, hashes = { sha256 = "95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5" } }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:23:05Z, size = 186203, hashes = { sha256 = "c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82" } }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:23:06Z, size = 185653, hashes = { sha256 = "52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8" } }, + { url = 
"https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:23:07Z, size = 184920, hashes = { sha256 = "6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f" } }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", upload-time = 2026-01-10T09:23:09Z, size = 178255, hashes = { sha256 = "abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a" } }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", upload-time = 2026-01-10T09:23:10Z, size = 178689, hashes = { sha256 = "3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156" } }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", upload-time = 2026-01-10T09:23:12Z, size = 177406, hashes = { sha256 = "8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0" } }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", upload-time = 2026-01-10T09:23:13Z, size = 175085, hashes = { sha256 = "daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904" } }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:23:14Z, size = 175328, hashes = { sha256 = "fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4" } }, + { url = 
"https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:23:15Z, size = 185044, hashes = { sha256 = "781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e" } }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:23:17Z, size = 186279, hashes = { sha256 = "caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4" } }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:23:18Z, size = 185711, hashes = { sha256 = "19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1" } }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:23:19Z, size = 184982, hashes = { sha256 = "a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3" } }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", upload-time = 2026-01-10T09:23:21Z, size = 177915, hashes = { sha256 = "a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8" } }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", upload-time = 2026-01-10T09:23:22Z, size = 178381, hashes = { sha256 = "c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d" } }, + { url = 
"https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", upload-time = 2026-01-10T09:23:24Z, size = 177737, hashes = { sha256 = "a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244" } }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", upload-time = 2026-01-10T09:23:25Z, size = 175268, hashes = { sha256 = "b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e" } }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:23:27Z, size = 175486, hashes = { sha256 = "569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641" } }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:23:28Z, size = 185331, hashes = { sha256 = "50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8" } }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:23:29Z, size = 186501, hashes = { sha256 = "152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e" } }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:23:31Z, size = 186062, hashes = { sha256 = "bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944" } }, + { url = 
"https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:23:32Z, size = 185356, hashes = { sha256 = "32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206" } }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", upload-time = 2026-01-10T09:23:33Z, size = 178085, hashes = { sha256 = "5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6" } }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", upload-time = 2026-01-10T09:23:35Z, size = 178531, hashes = { sha256 = "b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd" } }, + { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", upload-time = 2026-01-10T09:23:36Z, size = 174947, hashes = { sha256 = "349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d" } }, + { url = "https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:23:37Z, size = 175260, hashes = { sha256 = "4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03" } }, + { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:23:39Z, size = 176071, hashes = { sha256 = "f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da" } }, + { url = 
"https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:23:41Z, size = 176968, hashes = { sha256 = "0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c" } }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", upload-time = 2026-01-10T09:23:42Z, size = 178735, hashes = { sha256 = "a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767" } }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", upload-time = 2026-01-10T09:23:45Z, size = 171598, hashes = { sha256 = "1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec" } }, +] + [[packages]] name = "wheel" version = "0.45.1" diff --git a/pyproject.toml b/pyproject.toml index 1f4ee48882..aada863326 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ dependencies = [ "pip==25.3", "pluggy==1.6.0", "prompt-toolkit==3.0.51", + "protobuf>=3.20,<7", "pydantic==2.12.5", "requests==2.32.4", "requirements-parser==0.13.0", @@ -46,6 +47,7 @@ dependencies = [ "tomlkit==0.13.3", "typer==0.17.3", "urllib3>=2.6.3,<3", + "websockets>=16.0,<17", ] classifiers = [ "Development Status :: 5 - Production/Stable", diff --git a/snyk/requirements.txt b/snyk/requirements.txt index 144536850a..a46c1adaa5 100644 --- a/snyk/requirements.txt +++ b/snyk/requirements.txt @@ -66,5 +66,6 @@ tzdata==2025.2 ; sys_platform == 'win32' tzlocal==5.3.1 urllib3==2.6.3 wcwidth==0.2.13 +websockets==16.0 wheel==0.45.1 zipp==3.23.0 ; python_full_version < '3.12' diff --git a/src/snowflake/cli/_plugins/streamlit/commands.py b/src/snowflake/cli/_plugins/streamlit/commands.py index 1c8ef10ccb..63a33d9482 100644 --- 
@app.command("logs", requires_connection=True)
@with_project_definition()
def streamlit_logs(
    entity_id: str = entity_argument("streamlit"),
    tail: int = typer.Option(
        100,
        "--tail",
        "-n",
        help="Number of historical log lines to fetch (max: 10000). Use 0 for live logs only.",
    ),
    **options,
) -> CommandResult:
    """
    Streams live logs from a deployed Streamlit app to your terminal.

    Reads the Streamlit app name from the project definition file (snowflake.yml).
    Connects to the app's developer log service via WebSocket and prints
    log entries in real time. Press Ctrl+C to stop streaming.
    """
    # Imported lazily so the protobuf/websockets dependencies are only
    # loaded when this command actually runs, not at CLI startup.
    from snowflake.cli._plugins.streamlit.log_streaming import (
        MAX_TAIL_LINES,
        stream_logs,
    )

    # Validate --tail before doing any network work.
    if tail < 0 or tail > MAX_TAIL_LINES:
        raise ClickException(f"--tail must be between 0 and {MAX_TAIL_LINES}")

    cli_context = get_cli_context()
    conn = cli_context.connection

    # Accept both v1 and v2 project definitions: a v1 definition is
    # converted to v2 in-memory (matching the `snow streamlit deploy` flow).
    pd = cli_context.project_definition
    if not pd.meets_version_requirement("2"):
        if not pd.streamlit:
            raise NoProjectDefinitionError(
                project_type="streamlit", project_root=cli_context.project_root
            )
        pd = convert_project_definition_to_v2(cli_context.project_root, pd)

    # Resolve the entity named on the command line (or the sole streamlit
    # entity) from the project definition.
    entity_model = get_entity_for_operation(
        cli_context=cli_context,
        entity_id=entity_id,
        project_definition=pd,
        entity_type=ObjectType.STREAMLIT.value.cli_name,
    )

    # Fully qualify the app name with the connection's database/schema.
    fqn = entity_model.fqn.using_connection(conn)

    # Blocks until the stream ends or the user presses Ctrl+C.
    # NOTE(review): json_output presumably reflects the global --format
    # option — confirm `output_format.is_json` is set by `--format json`.
    stream_logs(
        conn=conn,
        fqn=str(fqn),
        tail_lines=tail,
        json_output=cli_context.output_format.is_json,
    )
    return MessageResult("Log streaming ended.")
+++ b/src/snowflake/cli/_plugins/streamlit/log_streaming.py @@ -0,0 +1,228 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import json +import logging +import sys +from datetime import datetime, timezone +from typing import Tuple + +from click import ClickException +from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( + logs_service_pb2 as pb, +) +from snowflake.cli.api.console import cli_console +from snowflake.connector import SnowflakeConnection + +log = logging.getLogger(__name__) + +# LogSource enum labels +_LOG_SOURCE_LABELS = { + pb.LOG_SOURCE_UNSPECIFIED: "UNKNOWN", + pb.LOG_SOURCE_APP: "APP", + pb.LOG_SOURCE_MANAGER: "MGR", +} + +# LogLevel enum labels +_LOG_LEVEL_LABELS = { + pb.LOG_LEVEL_UNSPECIFIED: "UNKNOWN", + pb.LOG_LEVEL_DEBUG: "DEBUG", + pb.LOG_LEVEL_INFO: "INFO", + pb.LOG_LEVEL_WARN: "WARN", + pb.LOG_LEVEL_ERROR: "ERROR", +} + +DEFAULT_TAIL_LINES = 100 +MAX_TAIL_LINES = 10000 + +# Timeout for each ws.recv() call — mirrors the Go client's 90-second read +# deadline. When no log entry arrives within this window, we re-issue recv() +# so the loop stays responsive to KeyboardInterrupt. +_WS_RECV_TIMEOUT_SECONDS = 90 + + +def get_developer_api_token(conn: SnowflakeConnection, fqn: str) -> Tuple[str, str]: + """ + Calls SYSTEM$GET_STREAMLIT_DEVELOPER_API_TOKEN and returns (token, resource_uri). 
+ """ + if "'" in fqn: + raise ClickException( + f"Invalid Streamlit app name: {fqn}. Name must not contain single quotes." + ) + + query = f"CALL SYSTEM$GET_STREAMLIT_DEVELOPER_API_TOKEN('{fqn}', false);" + log.debug("Fetching developer API token for %s", fqn) + + cursor = conn.cursor() + try: + cursor.execute(query) + row = cursor.fetchone() + if not row: + raise ClickException( + "Empty response from SYSTEM$GET_STREAMLIT_DEVELOPER_API_TOKEN" + ) + raw = row[0] + finally: + cursor.close() + + try: + resp = json.loads(raw) + except (json.JSONDecodeError, TypeError) as e: + raise ClickException(f"Failed to parse token response: {e}") from e + + token = resp.get("token", "") + resource_uri = resp.get("resourceUri", "") + + if not token: + raise ClickException("Empty token in developer API response") + if not resource_uri: + raise ClickException("Empty resourceUri in developer API response") + + log.debug("Resource URI: %s", resource_uri) + return token, resource_uri + + +def build_ws_url(resource_uri: str) -> str: + """Convert resource URI to WebSocket URL and append /logs path.""" + ws_url = resource_uri.replace("https://", "wss://", 1).replace( + "http://", "ws://", 1 + ) + return ws_url.rstrip("/") + "/logs" + + +def _parse_timestamp(entry: pb.LogEntry) -> datetime: + """Extract a timezone-aware UTC datetime from a LogEntry.""" + if entry.HasField("timestamp"): + return entry.timestamp.ToDatetime(tzinfo=timezone.utc) + return datetime.fromtimestamp(0, tz=timezone.utc) + + +def format_log_entry(entry: pb.LogEntry) -> str: + """Format a LogEntry protobuf message into a human-readable line.""" + ts = _parse_timestamp(entry) + ts_str = ts.strftime("%Y-%m-%d %H:%M:%S.") + f"{ts.microsecond // 1000:03d}" + + source = _LOG_SOURCE_LABELS.get(entry.log_source, "UNKNOWN") + level = _LOG_LEVEL_LABELS.get(entry.level, "UNKNOWN") + return f"[{ts_str}] [{level}] [{source}] [seq:{entry.sequence}] {entry.content}" + + +def log_entry_to_dict(entry: pb.LogEntry) -> dict: + 
"""Convert a LogEntry protobuf message into a JSON-serializable dict.""" + ts = _parse_timestamp(entry) + return { + "timestamp": ts.isoformat(), + "level": _LOG_LEVEL_LABELS.get(entry.level, "UNKNOWN"), + "source": _LOG_SOURCE_LABELS.get(entry.log_source, "UNKNOWN"), + "sequence": entry.sequence, + "content": entry.content, + } + + +def stream_logs( + conn: SnowflakeConnection, + fqn: str, + tail_lines: int = DEFAULT_TAIL_LINES, + json_output: bool = False, +) -> None: + """ + Connect to the Streamlit developer log streaming WebSocket and print + log entries to stdout until interrupted. + + When *json_output* is True each log entry is emitted as a single-line + JSON object (JSONL), suitable for piping to ``jq`` or other tools. + """ + try: + import websockets.sync.client as ws_client + except ImportError: + raise ClickException( + "The 'websockets' package is required for log streaming. " + "Install it with: pip install websockets" + ) + + # 1. Get token + cli_console.step("Fetching developer API token...") + token, resource_uri = get_developer_api_token(conn, fqn) + + # 2. Build WebSocket URL + ws_url = build_ws_url(resource_uri) + cli_console.step(f"Connecting to log stream: {ws_url}") + + # 3. Connect + additional_headers = { + "Authorization": f'Snowflake Token="{token}"', + } + + try: + ws = ws_client.connect( + ws_url, + additional_headers=additional_headers, + open_timeout=10, + close_timeout=5, + # Disable automatic WebSocket ping/pong. The log-streaming + # server doesn't respond to pings, so leaving them enabled + # causes the client to close the connection after ~40 s. + ping_interval=None, + ping_timeout=None, + ) + except Exception as e: + raise ClickException(f"Failed to connect to log stream: {e}") from e + + try: + # 4. 
Send StreamLogsRequest + request = pb.StreamLogsRequest(tail_lines=tail_lines) + ws.send(request.SerializeToString()) + log.debug("Sent StreamLogsRequest with tail_lines=%d", tail_lines) + + cli_console.step(f"Streaming logs (tail={tail_lines}). Press Ctrl+C to stop.") + sys.stdout.write("---\n") + sys.stdout.flush() + + # 5. Read loop + while True: + try: + message = ws.recv(timeout=_WS_RECV_TIMEOUT_SECONDS) + except TimeoutError: + # No message within the timeout window — loop back so we + # stay responsive to KeyboardInterrupt. + continue + except Exception as e: + # ConnectionClosed or unexpected error — stop streaming. + log.debug("WebSocket recv error: %s", e) + break + + if isinstance(message, bytes): + entry = pb.LogEntry() + entry.ParseFromString(message) + if json_output: + sys.stdout.write(json.dumps(log_entry_to_dict(entry)) + "\n") + else: + sys.stdout.write(format_log_entry(entry) + "\n") + sys.stdout.flush() + else: + # Text message (unexpected, but write it) + sys.stdout.write(str(message) + "\n") + sys.stdout.flush() + + except KeyboardInterrupt: + pass + finally: + try: + ws.close() + except Exception: + pass + sys.stdout.write("\n--- Log streaming stopped.\n") + sys.stdout.flush() diff --git a/src/snowflake/cli/_plugins/streamlit/proto/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/snowflake/cli/_plugins/streamlit/proto/developer/v1/logs_service.proto b/src/snowflake/cli/_plugins/streamlit/proto/developer/v1/logs_service.proto new file mode 100644 index 0000000000..de25490e5f --- /dev/null +++ b/src/snowflake/cli/_plugins/streamlit/proto/developer/v1/logs_service.proto @@ -0,0 +1,40 @@ +syntax = "proto3"; + +package developer.v1; + +import "google/protobuf/timestamp.proto"; + +option go_package = "github.com/snowflakedb/streamlit-container-runtime/gen/developer/v1;developerv1"; + +// LogSource identifies the origin of log entries +enum LogSource { + 
LOG_SOURCE_UNSPECIFIED = 0; + LOG_SOURCE_APP = 1; + LOG_SOURCE_MANAGER = 2; +} + +// LogLevel represents the severity of a log entry +enum LogLevel { + LOG_LEVEL_UNSPECIFIED = 0; + LOG_LEVEL_DEBUG = 1; + LOG_LEVEL_INFO = 2; + LOG_LEVEL_WARN = 3; + LOG_LEVEL_ERROR = 4; +} + +// StreamLogsRequest configures the log stream +message StreamLogsRequest { + // Number of historical lines to send before streaming live logs. + // If 0, only stream live logs. Max: 1000 (buffer size). + int32 tail_lines = 1; +} + +// LogEntry represents a single log line +message LogEntry { + LogSource log_source = 1; + string content = 2; + // Timestamp when the log was captured (UTC) + google.protobuf.Timestamp timestamp = 3; + int64 sequence = 4; + LogLevel level = 5; +} diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py new file mode 100644 index 0000000000..85a14f96c6 --- /dev/null +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: developer/v1/logs_service.proto +# Regenerate with: +# python -m grpc_tools.protoc \ +# --proto_path=src/snowflake/cli/_plugins/streamlit/proto \ +# --python_out=src/snowflake/cli/_plugins/streamlit/proto/generated \ +# developer/v1/logs_service.proto +# ruff: noqa: SLF001 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x1f\x64\x65veloper/v1/logs_service.proto\x12\x0c\x64\x65veloper.v1\x1a\x1fgoogle/protobuf/timestamp.proto"\'\n\x11StreamLogsRequest\x12\x12\n\ntail_lines\x18\x01 \x01(\x05"\xb0\x01\n\x08LogEntry\x12+\n\nlog_source\x18\x01 \x01(\x0e\x32\x17.developer.v1.LogSource\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12-\n\ttimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x10\n\x08sequence\x18\x04 \x01(\x03\x12%\n\x05level\x18\x05 \x01(\x0e\x32\x16.developer.v1.LogLevel*S\n\tLogSource\x12\x1a\n\x16LOG_SOURCE_UNSPECIFIED\x10\x00\x12\x12\n\x0eLOG_SOURCE_APP\x10\x01\x12\x16\n\x12LOG_SOURCE_MANAGER\x10\x02*w\n\x08LogLevel\x12\x19\n\x15LOG_LEVEL_UNSPECIFIED\x10\x00\x12\x13\n\x0fLOG_LEVEL_DEBUG\x10\x01\x12\x12\n\x0eLOG_LEVEL_INFO\x10\x02\x12\x12\n\x0eLOG_LEVEL_WARN\x10\x03\x12\x13\n\x0fLOG_LEVEL_ERROR\x10\x04\x42QZOgithub.com/snowflakedb/streamlit-container-runtime/gen/developer/v1;developerv1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "developer.v1.logs_service_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals[ + "DESCRIPTOR" + ]._serialized_options = 
b"ZOgithub.com/snowflakedb/streamlit-container-runtime/gen/developer/v1;developerv1" + _globals["_LOGSOURCE"]._serialized_start = 302 + _globals["_LOGSOURCE"]._serialized_end = 385 + _globals["_LOGLEVEL"]._serialized_start = 387 + _globals["_LOGLEVEL"]._serialized_end = 506 + _globals["_STREAMLOGSREQUEST"]._serialized_start = 82 + _globals["_STREAMLOGSREQUEST"]._serialized_end = 121 + _globals["_LOGENTRY"]._serialized_start = 124 + _globals["_LOGENTRY"]._serialized_end = 300 +# @@protoc_insertion_point(module_scope) From 81162518262a67e4a61aa8e9b7e59605a5e43e80 Mon Sep 17 00:00:00 2001 From: Sandeep Kumta Vishnu Date: Sat, 21 Feb 2026 17:56:15 -0800 Subject: [PATCH 2/8] refactor: switch from websockets to websocket-client library MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace the `websockets` (v16) library with `websocket-client` (v1.6+) for WebSocket log streaming. websocket-client is a better fit: no auto-ping (avoids the server compatibility issue), opcode-level frame control via recv_data(), stable API, and lighter (no async internals). Also add missing google.protobuf.timestamp_pb2 import in the generated pb2 file — required for the serialized descriptor that depends on google/protobuf/timestamp.proto. 
--- pylock.toml | 69 ++----------------- pyproject.toml | 2 +- snyk/requirements.txt | 2 +- .../cli/_plugins/streamlit/log_streaming.py | 51 ++++++-------- .../developer/v1/logs_service_pb2.py | 1 + 5 files changed, 27 insertions(+), 98 deletions(-) diff --git a/pylock.toml b/pylock.toml index ab445f4e6f..8efe8fbd0e 100644 --- a/pylock.toml +++ b/pylock.toml @@ -878,71 +878,10 @@ sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc wheels = [{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", upload-time = 2024-01-06T02:10:55Z, size = 34166, hashes = { sha256 = "3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859" } }] [[packages]] -name = "websockets" -version = "16.0" -sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", upload-time = 2026-01-10T09:23:47Z, size = 179346, hashes = { sha256 = "5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5" } } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/74/221f58decd852f4b59cc3354cccaf87e8ef695fede361d03dc9a7396573b/websockets-16.0-cp310-cp310-macosx_10_9_universal2.whl", upload-time = 2026-01-10T09:22:21Z, size = 177343, hashes = { sha256 = "04cdd5d2d1dacbad0a7bf36ccbcd3ccd5a30ee188f2560b7a62a30d14107b31a" } }, - { url = "https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl", upload-time = 2026-01-10T09:22:22Z, size = 175021, hashes = { sha256 = "8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0" } }, - { url = "https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:22:23Z, size = 175320, hashes = { sha256 = 
"583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957" } }, - { url = "https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:22:25Z, size = 183815, hashes = { sha256 = "7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72" } }, - { url = "https://files.pythonhosted.org/packages/86/26/d40eaa2a46d4302becec8d15b0fc5e45bdde05191e7628405a19cf491ccd/websockets-16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:22:27Z, size = 185054, hashes = { sha256 = "df57afc692e517a85e65b72e165356ed1df12386ecb879ad5693be08fac65dde" } }, - { url = "https://files.pythonhosted.org/packages/b0/ba/6500a0efc94f7373ee8fefa8c271acdfd4dca8bd49a90d4be7ccabfc397e/websockets-16.0-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:22:28Z, size = 184565, hashes = { sha256 = "2b9f1e0d69bc60a4a87349d50c09a037a2607918746f07de04df9e43252c77a3" } }, - { url = "https://files.pythonhosted.org/packages/04/b4/96bf2cee7c8d8102389374a2616200574f5f01128d1082f44102140344cc/websockets-16.0-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:22:30Z, size = 183848, hashes = { sha256 = "335c23addf3d5e6a8633f9f8eda77efad001671e80b95c491dd0924587ece0b3" } }, - { url = "https://files.pythonhosted.org/packages/02/8e/81f40fb00fd125357814e8c3025738fc4ffc3da4b6b4a4472a82ba304b41/websockets-16.0-cp310-cp310-win32.whl", upload-time = 2026-01-10T09:22:32Z, size = 178249, hashes = { sha256 = "37b31c1623c6605e4c00d466c9d633f9b812ea430c11c8a278774a1fde1acfa9" } }, - { url = "https://files.pythonhosted.org/packages/b4/5f/7e40efe8df57db9b91c88a43690ac66f7b7aa73a11aa6a66b927e44f26fa/websockets-16.0-cp310-cp310-win_amd64.whl", upload-time = 2026-01-10T09:22:33Z, size = 178685, hashes = { sha256 = 
"8e1dab317b6e77424356e11e99a432b7cb2f3ec8c5ab4dabbcee6add48f72b35" } }, - { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", upload-time = 2026-01-10T09:22:34Z, size = 177340, hashes = { sha256 = "31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8" } }, - { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", upload-time = 2026-01-10T09:22:36Z, size = 175022, hashes = { sha256 = "417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad" } }, - { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:22:37Z, size = 175319, hashes = { sha256 = "af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d" } }, - { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:22:38Z, size = 184631, hashes = { sha256 = "08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe" } }, - { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:22:39Z, size = 185870, hashes = { sha256 = "7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b" } }, - { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:22:41Z, size = 185361, hashes = { sha256 = 
"d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5" } }, - { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:22:42Z, size = 184615, hashes = { sha256 = "1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64" } }, - { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", upload-time = 2026-01-10T09:22:43Z, size = 178246, hashes = { sha256 = "5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6" } }, - { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", upload-time = 2026-01-10T09:22:44Z, size = 178684, hashes = { sha256 = "8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac" } }, - { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", upload-time = 2026-01-10T09:22:46Z, size = 177365, hashes = { sha256 = "71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00" } }, - { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", upload-time = 2026-01-10T09:22:47Z, size = 175038, hashes = { sha256 = "8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79" } }, - { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:22:49Z, size = 175328, hashes = { sha256 = "86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39" } }, - { url = 
"https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:22:51Z, size = 184915, hashes = { sha256 = "9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c" } }, - { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:22:52Z, size = 186152, hashes = { sha256 = "e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f" } }, - { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:22:53Z, size = 185583, hashes = { sha256 = "a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1" } }, - { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:22:55Z, size = 184880, hashes = { sha256 = "485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2" } }, - { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", upload-time = 2026-01-10T09:22:56Z, size = 178261, hashes = { sha256 = "eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89" } }, - { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", upload-time = 2026-01-10T09:22:57Z, size = 178693, hashes = { sha256 = "5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea" } }, - { url = 
"https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", upload-time = 2026-01-10T09:22:59Z, size = 177364, hashes = { sha256 = "878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9" } }, - { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", upload-time = 2026-01-10T09:23:01Z, size = 175039, hashes = { sha256 = "52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230" } }, - { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:23:02Z, size = 175323, hashes = { sha256 = "e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c" } }, - { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:23:03Z, size = 184975, hashes = { sha256 = "95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5" } }, - { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:23:05Z, size = 186203, hashes = { sha256 = "c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82" } }, - { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:23:06Z, size = 185653, hashes = { sha256 = "52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8" } }, - { url = 
"https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:23:07Z, size = 184920, hashes = { sha256 = "6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f" } }, - { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", upload-time = 2026-01-10T09:23:09Z, size = 178255, hashes = { sha256 = "abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a" } }, - { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", upload-time = 2026-01-10T09:23:10Z, size = 178689, hashes = { sha256 = "3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156" } }, - { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", upload-time = 2026-01-10T09:23:12Z, size = 177406, hashes = { sha256 = "8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0" } }, - { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", upload-time = 2026-01-10T09:23:13Z, size = 175085, hashes = { sha256 = "daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904" } }, - { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:23:14Z, size = 175328, hashes = { sha256 = "fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4" } }, - { url = 
"https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:23:15Z, size = 185044, hashes = { sha256 = "781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e" } }, - { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:23:17Z, size = 186279, hashes = { sha256 = "caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4" } }, - { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:23:18Z, size = 185711, hashes = { sha256 = "19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1" } }, - { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:23:19Z, size = 184982, hashes = { sha256 = "a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3" } }, - { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", upload-time = 2026-01-10T09:23:21Z, size = 177915, hashes = { sha256 = "a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8" } }, - { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", upload-time = 2026-01-10T09:23:22Z, size = 178381, hashes = { sha256 = "c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d" } }, - { url = 
"https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", upload-time = 2026-01-10T09:23:24Z, size = 177737, hashes = { sha256 = "a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244" } }, - { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", upload-time = 2026-01-10T09:23:25Z, size = 175268, hashes = { sha256 = "b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e" } }, - { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:23:27Z, size = 175486, hashes = { sha256 = "569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641" } }, - { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:23:28Z, size = 185331, hashes = { sha256 = "50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8" } }, - { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:23:29Z, size = 186501, hashes = { sha256 = "152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e" } }, - { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", upload-time = 2026-01-10T09:23:31Z, size = 186062, hashes = { sha256 = "bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944" } }, - { url = 
"https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", upload-time = 2026-01-10T09:23:32Z, size = 185356, hashes = { sha256 = "32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206" } }, - { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", upload-time = 2026-01-10T09:23:33Z, size = 178085, hashes = { sha256 = "5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6" } }, - { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", upload-time = 2026-01-10T09:23:35Z, size = 178531, hashes = { sha256 = "b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd" } }, - { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", upload-time = 2026-01-10T09:23:36Z, size = 174947, hashes = { sha256 = "349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d" } }, - { url = "https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", upload-time = 2026-01-10T09:23:37Z, size = 175260, hashes = { sha256 = "4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03" } }, - { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", upload-time = 2026-01-10T09:23:39Z, size = 176071, hashes = { sha256 = "f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da" } }, - { url = 
"https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", upload-time = 2026-01-10T09:23:41Z, size = 176968, hashes = { sha256 = "0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c" } }, - { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", upload-time = 2026-01-10T09:23:42Z, size = 178735, hashes = { sha256 = "a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767" } }, - { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", upload-time = 2026-01-10T09:23:45Z, size = 171598, hashes = { sha256 = "1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec" } }, -] +name = "websocket-client" +version = "1.9.0" +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", upload-time = 2025-10-07T21:16:36Z, size = 70576, hashes = { sha256 = "9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", upload-time = 2025-10-07T21:16:34Z, size = 82616, hashes = { sha256 = "af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef" } }] [[packages]] name = "wheel" diff --git a/pyproject.toml b/pyproject.toml index aada863326..4b12cf6d5c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ dependencies = [ "tomlkit==0.13.3", "typer==0.17.3", "urllib3>=2.6.3,<3", - "websockets>=16.0,<17", + "websocket-client>=1.6.0,<2", ] classifiers = [ "Development Status :: 5 - Production/Stable", diff 
--git a/snyk/requirements.txt b/snyk/requirements.txt index a46c1adaa5..ccd92971c6 100644 --- a/snyk/requirements.txt +++ b/snyk/requirements.txt @@ -66,6 +66,6 @@ tzdata==2025.2 ; sys_platform == 'win32' tzlocal==5.3.1 urllib3==2.6.3 wcwidth==0.2.13 -websockets==16.0 +websocket-client==1.9.0 wheel==0.45.1 zipp==3.23.0 ; python_full_version < '3.12' diff --git a/src/snowflake/cli/_plugins/streamlit/log_streaming.py b/src/snowflake/cli/_plugins/streamlit/log_streaming.py index 300eacfe46..ea5a1504e1 100644 --- a/src/snowflake/cli/_plugins/streamlit/log_streaming.py +++ b/src/snowflake/cli/_plugins/streamlit/log_streaming.py @@ -132,6 +132,9 @@ def log_entry_to_dict(entry: pb.LogEntry) -> dict: } +_HANDSHAKE_TIMEOUT_SECONDS = 10 + + def stream_logs( conn: SnowflakeConnection, fqn: str, @@ -145,13 +148,7 @@ def stream_logs( When *json_output* is True each log entry is emitted as a single-line JSON object (JSONL), suitable for piping to ``jq`` or other tools. """ - try: - import websockets.sync.client as ws_client - except ImportError: - raise ClickException( - "The 'websockets' package is required for log streaming. " - "Install it with: pip install websockets" - ) + import websocket # 1. Get token cli_console.step("Fetching developer API token...") @@ -162,29 +159,19 @@ def stream_logs( cli_console.step(f"Connecting to log stream: {ws_url}") # 3. Connect - additional_headers = { - "Authorization": f'Snowflake Token="{token}"', - } + header = [f'Authorization: Snowflake Token="{token}"'] + ws = websocket.WebSocket() + ws.timeout = _WS_RECV_TIMEOUT_SECONDS try: - ws = ws_client.connect( - ws_url, - additional_headers=additional_headers, - open_timeout=10, - close_timeout=5, - # Disable automatic WebSocket ping/pong. The log-streaming - # server doesn't respond to pings, so leaving them enabled - # causes the client to close the connection after ~40 s. 
- ping_interval=None, - ping_timeout=None, - ) + ws.connect(ws_url, header=header, timeout=_HANDSHAKE_TIMEOUT_SECONDS) except Exception as e: raise ClickException(f"Failed to connect to log stream: {e}") from e try: # 4. Send StreamLogsRequest request = pb.StreamLogsRequest(tail_lines=tail_lines) - ws.send(request.SerializeToString()) + ws.send_binary(request.SerializeToString()) log.debug("Sent StreamLogsRequest with tail_lines=%d", tail_lines) cli_console.step(f"Streaming logs (tail={tail_lines}). Press Ctrl+C to stop.") @@ -194,28 +181,30 @@ def stream_logs( # 5. Read loop while True: try: - message = ws.recv(timeout=_WS_RECV_TIMEOUT_SECONDS) - except TimeoutError: + opcode, data = ws.recv_data() + except websocket.WebSocketTimeoutException: # No message within the timeout window — loop back so we # stay responsive to KeyboardInterrupt. continue + except websocket.WebSocketConnectionClosedException: + log.debug("WebSocket connection closed by server") + break except Exception as e: - # ConnectionClosed or unexpected error — stop streaming. 
log.debug("WebSocket recv error: %s", e) break - if isinstance(message, bytes): + if opcode == websocket.ABNF.OPCODE_BINARY: entry = pb.LogEntry() - entry.ParseFromString(message) + entry.ParseFromString(data) if json_output: sys.stdout.write(json.dumps(log_entry_to_dict(entry)) + "\n") else: sys.stdout.write(format_log_entry(entry) + "\n") sys.stdout.flush() - else: - # Text message (unexpected, but write it) - sys.stdout.write(str(message) + "\n") - sys.stdout.flush() + elif opcode == websocket.ABNF.OPCODE_CLOSE: + break + elif opcode == websocket.ABNF.OPCODE_PING: + ws.pong(data) except KeyboardInterrupt: pass diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py index 85a14f96c6..5edab65a54 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py @@ -11,6 +11,7 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database +from google.protobuf import timestamp_pb2 as _timestamp_pb2 # noqa: F401 from google.protobuf.internal import builder as _builder # @@protoc_insertion_point(imports) From c2a642acc453a69d87df3e189186c801f41dffad Mon Sep 17 00:00:00 2001 From: Sandeep Kumta Vishnu Date: Sat, 21 Feb 2026 18:07:24 -0800 Subject: [PATCH 3/8] refactor: add proto_codec layer, DeveloperApiToken, tests, and polish - Add proto_codec.py with LogEntry dataclass, decode/encode helpers, and to_dict() for JSONL output. Decouples proto from app logic. 
- Refactor log_streaming.py to use proto_codec instead of raw pb2 - Return DeveloperApiToken dataclass instead of Tuple[str, str] - Add decode error handling (DecodeError, ValueError) with log.warning - Use ws.close(status=STATUS_NORMAL) for explicit close status - Use Typer min/max on --tail instead of manual validation - Add protobuf runtime version compat (try/except) in pb2 file - Add Apache 2.0 license headers to all new/empty files - Add test_streamlit_logs.py with 26 unit tests covering URL building, token fetching, proto codec round-trips, and WebSocket streaming --- .../cli/_plugins/streamlit/commands.py | 8 +- .../cli/_plugins/streamlit/log_streaming.py | 110 ++--- .../cli/_plugins/streamlit/proto/__init__.py | 13 + .../streamlit/proto/generated/__init__.py | 13 + .../proto/generated/developer/__init__.py | 13 + .../proto/generated/developer/v1/__init__.py | 13 + .../developer/v1/logs_service_pb2.py | 18 + .../cli/_plugins/streamlit/proto_codec.py | 109 +++++ tests/streamlit/test_streamlit_logs.py | 458 ++++++++++++++++++ 9 files changed, 680 insertions(+), 75 deletions(-) create mode 100644 src/snowflake/cli/_plugins/streamlit/proto_codec.py create mode 100644 tests/streamlit/test_streamlit_logs.py diff --git a/src/snowflake/cli/_plugins/streamlit/commands.py b/src/snowflake/cli/_plugins/streamlit/commands.py index 63a33d9482..7339adc967 100644 --- a/src/snowflake/cli/_plugins/streamlit/commands.py +++ b/src/snowflake/cli/_plugins/streamlit/commands.py @@ -223,7 +223,9 @@ def streamlit_logs( 100, "--tail", "-n", - help="Number of historical log lines to fetch (max: 10000). Use 0 for live logs only.", + min=0, + max=10000, + help="Number of historical log lines to fetch. Use 0 for live logs only.", ), **options, ) -> CommandResult: @@ -235,13 +237,9 @@ def streamlit_logs( log entries in real time. Press Ctrl+C to stop streaming. 
""" from snowflake.cli._plugins.streamlit.log_streaming import ( - MAX_TAIL_LINES, stream_logs, ) - if tail < 0 or tail > MAX_TAIL_LINES: - raise ClickException(f"--tail must be between 0 and {MAX_TAIL_LINES}") - cli_context = get_cli_context() conn = cli_context.connection diff --git a/src/snowflake/cli/_plugins/streamlit/log_streaming.py b/src/snowflake/cli/_plugins/streamlit/log_streaming.py index ea5a1504e1..dc96b0ead5 100644 --- a/src/snowflake/cli/_plugins/streamlit/log_streaming.py +++ b/src/snowflake/cli/_plugins/streamlit/log_streaming.py @@ -12,51 +12,53 @@ # See the License for the specific language governing permissions and # limitations under the License. +""" +WebSocket log streaming client for Streamlit developer logs. + +Connects to the Streamlit container runtime's developer log service +via WebSocket and streams log entries in real time. +""" + from __future__ import annotations import json import logging import sys -from datetime import datetime, timezone -from typing import Tuple +from dataclasses import dataclass +import websocket from click import ClickException -from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( - logs_service_pb2 as pb, +from google.protobuf.message import DecodeError +from snowflake.cli._plugins.streamlit.proto_codec import ( + decode_log_entry, + encode_stream_logs_request, ) from snowflake.cli.api.console import cli_console from snowflake.connector import SnowflakeConnection log = logging.getLogger(__name__) -# LogSource enum labels -_LOG_SOURCE_LABELS = { - pb.LOG_SOURCE_UNSPECIFIED: "UNKNOWN", - pb.LOG_SOURCE_APP: "APP", - pb.LOG_SOURCE_MANAGER: "MGR", -} - -# LogLevel enum labels -_LOG_LEVEL_LABELS = { - pb.LOG_LEVEL_UNSPECIFIED: "UNKNOWN", - pb.LOG_LEVEL_DEBUG: "DEBUG", - pb.LOG_LEVEL_INFO: "INFO", - pb.LOG_LEVEL_WARN: "WARN", - pb.LOG_LEVEL_ERROR: "ERROR", -} - DEFAULT_TAIL_LINES = 100 MAX_TAIL_LINES = 10000 -# Timeout for each ws.recv() call — mirrors the Go client's 90-second read -# 
deadline. When no log entry arrives within this window, we re-issue recv() -# so the loop stays responsive to KeyboardInterrupt. +# Timeout for each ws.recv_data() call — mirrors the Go client's 90-second +# read deadline. When no log entry arrives within this window, we re-issue +# recv_data() so the loop stays responsive to KeyboardInterrupt. _WS_RECV_TIMEOUT_SECONDS = 90 +_HANDSHAKE_TIMEOUT_SECONDS = 10 + -def get_developer_api_token(conn: SnowflakeConnection, fqn: str) -> Tuple[str, str]: +@dataclass +class DeveloperApiToken: + token: str + resource_uri: str + + +def get_developer_api_token(conn: SnowflakeConnection, fqn: str) -> DeveloperApiToken: """ - Calls SYSTEM$GET_STREAMLIT_DEVELOPER_API_TOKEN and returns (token, resource_uri). + Calls SYSTEM$GET_STREAMLIT_DEVELOPER_API_TOKEN and returns a + DeveloperApiToken with the token and resource URI. """ if "'" in fqn: raise ClickException( @@ -92,7 +94,7 @@ def get_developer_api_token(conn: SnowflakeConnection, fqn: str) -> Tuple[str, s raise ClickException("Empty resourceUri in developer API response") log.debug("Resource URI: %s", resource_uri) - return token, resource_uri + return DeveloperApiToken(token=token, resource_uri=resource_uri) def build_ws_url(resource_uri: str) -> str: @@ -103,38 +105,6 @@ def build_ws_url(resource_uri: str) -> str: return ws_url.rstrip("/") + "/logs" -def _parse_timestamp(entry: pb.LogEntry) -> datetime: - """Extract a timezone-aware UTC datetime from a LogEntry.""" - if entry.HasField("timestamp"): - return entry.timestamp.ToDatetime(tzinfo=timezone.utc) - return datetime.fromtimestamp(0, tz=timezone.utc) - - -def format_log_entry(entry: pb.LogEntry) -> str: - """Format a LogEntry protobuf message into a human-readable line.""" - ts = _parse_timestamp(entry) - ts_str = ts.strftime("%Y-%m-%d %H:%M:%S.") + f"{ts.microsecond // 1000:03d}" - - source = _LOG_SOURCE_LABELS.get(entry.log_source, "UNKNOWN") - level = _LOG_LEVEL_LABELS.get(entry.level, "UNKNOWN") - return f"[{ts_str}] 
[{level}] [{source}] [seq:{entry.sequence}] {entry.content}" - - -def log_entry_to_dict(entry: pb.LogEntry) -> dict: - """Convert a LogEntry protobuf message into a JSON-serializable dict.""" - ts = _parse_timestamp(entry) - return { - "timestamp": ts.isoformat(), - "level": _LOG_LEVEL_LABELS.get(entry.level, "UNKNOWN"), - "source": _LOG_SOURCE_LABELS.get(entry.log_source, "UNKNOWN"), - "sequence": entry.sequence, - "content": entry.content, - } - - -_HANDSHAKE_TIMEOUT_SECONDS = 10 - - def stream_logs( conn: SnowflakeConnection, fqn: str, @@ -148,18 +118,16 @@ def stream_logs( When *json_output* is True each log entry is emitted as a single-line JSON object (JSONL), suitable for piping to ``jq`` or other tools. """ - import websocket - # 1. Get token cli_console.step("Fetching developer API token...") - token, resource_uri = get_developer_api_token(conn, fqn) + token_info = get_developer_api_token(conn, fqn) # 2. Build WebSocket URL - ws_url = build_ws_url(resource_uri) + ws_url = build_ws_url(token_info.resource_uri) cli_console.step(f"Connecting to log stream: {ws_url}") # 3. Connect - header = [f'Authorization: Snowflake Token="{token}"'] + header = [f'Authorization: Snowflake Token="{token_info.token}"'] ws = websocket.WebSocket() ws.timeout = _WS_RECV_TIMEOUT_SECONDS @@ -170,8 +138,7 @@ def stream_logs( try: # 4. Send StreamLogsRequest - request = pb.StreamLogsRequest(tail_lines=tail_lines) - ws.send_binary(request.SerializeToString()) + ws.send_binary(encode_stream_logs_request(tail_lines)) log.debug("Sent StreamLogsRequest with tail_lines=%d", tail_lines) cli_console.step(f"Streaming logs (tail={tail_lines}). 
Press Ctrl+C to stop.") @@ -194,12 +161,15 @@ def stream_logs( break if opcode == websocket.ABNF.OPCODE_BINARY: - entry = pb.LogEntry() - entry.ParseFromString(data) + try: + entry = decode_log_entry(data) + except (DecodeError, ValueError) as e: + log.warning("Failed to decode log entry: %s", e) + continue if json_output: - sys.stdout.write(json.dumps(log_entry_to_dict(entry)) + "\n") + sys.stdout.write(json.dumps(entry.to_dict()) + "\n") else: - sys.stdout.write(format_log_entry(entry) + "\n") + sys.stdout.write(entry.format_line() + "\n") sys.stdout.flush() elif opcode == websocket.ABNF.OPCODE_CLOSE: break @@ -210,7 +180,7 @@ def stream_logs( pass finally: try: - ws.close() + ws.close(status=websocket.STATUS_NORMAL) except Exception: pass sys.stdout.write("\n--- Log streaming stopped.\n") diff --git a/src/snowflake/cli/_plugins/streamlit/proto/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/__init__.py index e69de29bb2..ada0a4e13d 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/__init__.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py index e69de29bb2..ada0a4e13d 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py index e69de29bb2..ada0a4e13d 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py index e69de29bb2..ada0a4e13d 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py index 5edab65a54..faa82664bd 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/logs_service_pb2.py @@ -7,6 +7,10 @@ # --python_out=src/snowflake/cli/_plugins/streamlit/proto/generated \ # developer/v1/logs_service.proto # ruff: noqa: SLF001 +# NOTE: The runtime version check below is wrapped in a try/except for +# compatibility with protobuf 5.x (pulled by snowflake-connector-python) and 6.x. +# IMPORTANT: After regenerating, you must re-apply the try/except wrapper around +# the ValidateProtobufRuntimeVersion call. 
"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -14,6 +18,20 @@ from google.protobuf import timestamp_pb2 as _timestamp_pb2 # noqa: F401 from google.protobuf.internal import builder as _builder +try: + from google.protobuf import runtime_version as _runtime_version + + _runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 32, + 1, + "", + "developer/v1/logs_service.proto", + ) +except Exception: + pass # protobuf 5.x compat: may lack runtime_version or fail version check + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() diff --git a/src/snowflake/cli/_plugins/streamlit/proto_codec.py b/src/snowflake/cli/_plugins/streamlit/proto_codec.py new file mode 100644 index 0000000000..95e22196ca --- /dev/null +++ b/src/snowflake/cli/_plugins/streamlit/proto_codec.py @@ -0,0 +1,109 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Protobuf codec for the Streamlit developer log streaming protocol. + +Uses generated protobuf classes from logs_service.proto and provides +a Python-friendly dataclass wrapper for log entries. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime, timezone + +from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( + logs_service_pb2 as pb2, +) + +# Re-export enum values for convenience +LOG_SOURCE_APP = pb2.LOG_SOURCE_APP +LOG_SOURCE_MANAGER = pb2.LOG_SOURCE_MANAGER +LOG_SOURCE_UNSPECIFIED = pb2.LOG_SOURCE_UNSPECIFIED + +LOG_LEVEL_DEBUG = pb2.LOG_LEVEL_DEBUG +LOG_LEVEL_INFO = pb2.LOG_LEVEL_INFO +LOG_LEVEL_WARN = pb2.LOG_LEVEL_WARN +LOG_LEVEL_ERROR = pb2.LOG_LEVEL_ERROR +LOG_LEVEL_UNSPECIFIED = pb2.LOG_LEVEL_UNSPECIFIED + +LOG_SOURCE_LABELS = { + LOG_SOURCE_APP: "APP", + LOG_SOURCE_MANAGER: "MGR", + LOG_SOURCE_UNSPECIFIED: "UNKNOWN", +} + +LOG_LEVEL_LABELS = { + LOG_LEVEL_UNSPECIFIED: "UNKNOWN", + LOG_LEVEL_DEBUG: "DEBUG", + LOG_LEVEL_INFO: "INFO", + LOG_LEVEL_WARN: "WARN", + LOG_LEVEL_ERROR: "ERROR", +} + + +@dataclass +class LogEntry: + log_source: int + content: str + timestamp: datetime + sequence: int + level: int + + @property + def source_label(self) -> str: + return LOG_SOURCE_LABELS.get(self.log_source, "UNKNOWN") + + @property + def level_label(self) -> str: + return LOG_LEVEL_LABELS.get(self.level, "UNKNOWN") + + def format_line(self) -> str: + ts = self.timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + return f"[{ts}] [{self.level_label}] [{self.source_label}] [seq:{self.sequence}] {self.content}" + + def to_dict(self) -> dict: + return { + "timestamp": self.timestamp.isoformat(), + "level": self.level_label, + "source": self.source_label, + "sequence": self.sequence, + "content": self.content, + } + + +def encode_stream_logs_request(tail_lines: int) -> bytes: + """Encode a StreamLogsRequest protobuf message to binary.""" + request = pb2.StreamLogsRequest(tail_lines=tail_lines) + return request.SerializeToString() + + +def decode_log_entry(data: bytes) -> LogEntry: + """Decode a binary protobuf LogEntry message into a Python dataclass.""" + entry = pb2.LogEntry() + 
entry.ParseFromString(data) + + if entry.HasField("timestamp"): + ts = entry.timestamp.ToDatetime(tzinfo=timezone.utc) + else: + ts = datetime.fromtimestamp(0, tz=timezone.utc) + + return LogEntry( + log_source=entry.log_source, + content=entry.content, + timestamp=ts, + sequence=entry.sequence, + level=entry.level, + ) diff --git a/tests/streamlit/test_streamlit_logs.py b/tests/streamlit/test_streamlit_logs.py new file mode 100644 index 0000000000..682554f0ca --- /dev/null +++ b/tests/streamlit/test_streamlit_logs.py @@ -0,0 +1,458 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from datetime import datetime, timezone +from unittest import mock + +import pytest +from click import ClickException +from snowflake.cli._plugins.streamlit.log_streaming import ( + DeveloperApiToken, + build_ws_url, + get_developer_api_token, + stream_logs, +) +from snowflake.cli._plugins.streamlit.proto_codec import ( + LOG_LEVEL_INFO, + LOG_LEVEL_WARN, + LOG_SOURCE_APP, + LOG_SOURCE_MANAGER, + LogEntry, + decode_log_entry, + encode_stream_logs_request, +) + + +class TestBuildWsUrl: + def test_https_to_wss(self): + url = build_ws_url("https://my-app.snowflakecomputing.com/api/v1") + assert url == "wss://my-app.snowflakecomputing.com/api/v1/logs" + + def test_http_to_ws(self): + url = build_ws_url("http://localhost:8702") + assert url == "ws://localhost:8702/logs" + + def test_preserves_path(self): + url = build_ws_url("https://host.example.com/some/deep/path") + assert url == "wss://host.example.com/some/deep/path/logs" + + def test_strips_trailing_slash(self): + url = build_ws_url("https://host.example.com/api/v1/") + assert url == "wss://host.example.com/api/v1/logs" + + def test_replaces_only_first_occurrence(self): + url = build_ws_url("https://proxy.example.com/redirect/https://target.com") + assert url == "wss://proxy.example.com/redirect/https://target.com/logs" + + +class TestGetDeveloperApiToken: + def test_success(self): + mock_cursor = mock.Mock() + mock_cursor.fetchone.return_value = ( + '{"token": "abc123", "resourceUri": "https://my-app.snowflakecomputing.com"}', + ) + + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + result = get_developer_api_token(mock_conn, "DB.SCHEMA.APP") + + assert isinstance(result, DeveloperApiToken) + assert result.token == "abc123" + assert result.resource_uri == "https://my-app.snowflakecomputing.com" + mock_cursor.execute.assert_called_once_with( + "CALL SYSTEM$GET_STREAMLIT_DEVELOPER_API_TOKEN('DB.SCHEMA.APP', false);" + ) + mock_cursor.close.assert_called_once() + + def 
test_empty_response_raises(self): + mock_cursor = mock.Mock() + mock_cursor.fetchone.return_value = None + + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + with pytest.raises(ClickException, match="Empty response"): + get_developer_api_token(mock_conn, "DB.SCHEMA.APP") + + def test_empty_token_raises(self): + mock_cursor = mock.Mock() + mock_cursor.fetchone.return_value = ( + '{"token": "", "resourceUri": "https://example.com"}', + ) + + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + with pytest.raises(ClickException, match="Empty token"): + get_developer_api_token(mock_conn, "DB.SCHEMA.APP") + + def test_empty_resource_uri_raises(self): + mock_cursor = mock.Mock() + mock_cursor.fetchone.return_value = ('{"token": "abc", "resourceUri": ""}',) + + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + with pytest.raises(ClickException, match="Empty resourceUri"): + get_developer_api_token(mock_conn, "DB.SCHEMA.APP") + + def test_single_quote_in_fqn_raises(self): + mock_conn = mock.Mock() + + with pytest.raises(ClickException, match="single quotes"): + get_developer_api_token(mock_conn, "DB.SCHEMA.APP'; DROP TABLE --") + + def test_cursor_closed_on_error(self): + mock_cursor = mock.Mock() + mock_cursor.execute.side_effect = Exception("SQL error") + + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + with pytest.raises(Exception, match="SQL error"): + get_developer_api_token(mock_conn, "DB.SCHEMA.APP") + + mock_cursor.close.assert_called_once() + + +class TestEncodeStreamLogsRequest: + def test_encodes_tail_lines(self): + data = encode_stream_logs_request(100) + assert isinstance(data, bytes) + assert len(data) > 0 + + def test_zero_tail_lines(self): + data = encode_stream_logs_request(0) + assert isinstance(data, bytes) + + def test_roundtrip_via_pb2(self): + """Verify encoding matches what the protobuf library produces.""" + from 
snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( + logs_service_pb2 as pb2, + ) + + for tail_lines in [0, 1, 50, 100, 1000, 10000]: + encoded = encode_stream_logs_request(tail_lines) + decoded = pb2.StreamLogsRequest() + decoded.ParseFromString(encoded) + assert decoded.tail_lines == tail_lines + + +class TestDecodeLogEntry: + def _make_pb2_log_entry(self, log_source, content, seconds, nanos, sequence, level): + from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( + logs_service_pb2 as pb2, + ) + + entry = pb2.LogEntry() + entry.log_source = log_source + entry.content = content + entry.timestamp.seconds = seconds + entry.timestamp.nanos = nanos + entry.sequence = sequence + entry.level = level + return entry.SerializeToString() + + def test_decode_app_log(self): + data = self._make_pb2_log_entry( + log_source=1, # LOG_SOURCE_APP + content="Hello from app", + seconds=1700000000, + nanos=500000000, + sequence=42, + level=2, # LOG_LEVEL_INFO + ) + entry = decode_log_entry(data) + + assert entry.log_source == LOG_SOURCE_APP + assert entry.content == "Hello from app" + assert entry.sequence == 42 + assert entry.level == LOG_LEVEL_INFO + assert entry.source_label == "APP" + assert entry.level_label == "INFO" + + def test_decode_manager_log(self): + data = self._make_pb2_log_entry( + log_source=2, # LOG_SOURCE_MANAGER + content="Manager message", + seconds=1700000000, + nanos=0, + sequence=1, + level=3, # LOG_LEVEL_WARN + ) + entry = decode_log_entry(data) + + assert entry.log_source == LOG_SOURCE_MANAGER + assert entry.content == "Manager message" + assert entry.source_label == "MGR" + assert entry.level_label == "WARN" + + def test_format_line_includes_level(self): + entry = LogEntry( + log_source=LOG_SOURCE_APP, + content="test message", + timestamp=datetime(2024, 1, 15, 10, 30, 45, 123000, tzinfo=timezone.utc), + sequence=7, + level=LOG_LEVEL_INFO, + ) + line = entry.format_line() + assert line == "[2024-01-15 10:30:45.123] 
[INFO] [APP] [seq:7] test message" + + def test_format_line_warn_level(self): + entry = LogEntry( + log_source=LOG_SOURCE_MANAGER, + content="warning msg", + timestamp=datetime(2024, 6, 1, 12, 0, 0, 0, tzinfo=timezone.utc), + sequence=99, + level=LOG_LEVEL_WARN, + ) + line = entry.format_line() + assert line == "[2024-06-01 12:00:00.000] [WARN] [MGR] [seq:99] warning msg" + + def test_to_dict(self): + entry = LogEntry( + log_source=LOG_SOURCE_APP, + content="some content", + timestamp=datetime(2024, 3, 10, 8, 0, 0, tzinfo=timezone.utc), + sequence=5, + level=LOG_LEVEL_INFO, + ) + d = entry.to_dict() + assert d == { + "timestamp": "2024-03-10T08:00:00+00:00", + "level": "INFO", + "source": "APP", + "sequence": 5, + "content": "some content", + } + + +class TestStreamLogs: + def _make_entry_bytes(self, log_source, content, seconds, sequence, level): + from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( + logs_service_pb2 as pb2, + ) + + entry = pb2.LogEntry( + log_source=log_source, content=content, sequence=sequence, level=level + ) + entry.timestamp.seconds = seconds + return entry.SerializeToString() + + def _mock_conn_with_token(self): + """Return a mock connection that returns a valid token response.""" + mock_cursor = mock.Mock() + mock_cursor.fetchone.return_value = ( + '{"token": "test-token", "resourceUri": "https://test.snowflakecomputing.com/api"}', + ) + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + return mock_conn + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_streams_log_entries_to_stdout(self, mock_ws_module, mock_console, capsys): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + 
mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + entry1 = self._make_entry_bytes(1, "line one", 1700000000, 1, 2) + entry2 = self._make_entry_bytes(2, "line two", 1700000001, 2, 3) + + mock_ws.recv_data.side_effect = [ + (ws_lib.ABNF.OPCODE_BINARY, entry1), + (ws_lib.ABNF.OPCODE_BINARY, entry2), + (ws_lib.ABNF.OPCODE_CLOSE, b""), + ] + + conn = self._mock_conn_with_token() + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) + + captured = capsys.readouterr() + assert "line one" in captured.out + assert "line two" in captured.out + assert "[APP]" in captured.out + assert "[MGR]" in captured.out + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_json_output(self, mock_ws_module, mock_console, capsys): + import json + + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + entry_bytes = self._make_entry_bytes(1, "json test", 1700000000, 1, 2) + + mock_ws.recv_data.side_effect = [ + (ws_lib.ABNF.OPCODE_BINARY, entry_bytes), + (ws_lib.ABNF.OPCODE_CLOSE, b""), + ] + + conn = self._mock_conn_with_token() + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=50, json_output=True) + + captured = capsys.readouterr() + # Skip the "---" header line and the trailing "--- Log streaming stopped." 
+ json_lines = [ + line for line in captured.out.strip().split("\n") if line.startswith("{") + ] + assert len(json_lines) == 1 + parsed = json.loads(json_lines[0]) + assert parsed["content"] == "json test" + assert parsed["source"] == "APP" + assert parsed["level"] == "INFO" + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_handles_connection_closed(self, mock_ws_module, mock_console, capsys): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + mock_ws.recv_data.side_effect = ws_lib.WebSocketConnectionClosedException() + + conn = self._mock_conn_with_token() + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) + + captured = capsys.readouterr() + assert "Log streaming stopped" in captured.out + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_timeout_continues_loop(self, mock_ws_module, mock_console, capsys): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + entry_bytes = self._make_entry_bytes(1, "after timeout", 1700000000, 1, 2) + + # Timeout once, then get a message, then close + mock_ws.recv_data.side_effect = [ + ws_lib.WebSocketTimeoutException(), + (ws_lib.ABNF.OPCODE_BINARY, 
entry_bytes), + (ws_lib.ABNF.OPCODE_CLOSE, b""), + ] + + conn = self._mock_conn_with_token() + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) + + captured = capsys.readouterr() + assert "after timeout" in captured.out + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_graceful_close(self, mock_ws_module, mock_console): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + mock_ws.recv_data.side_effect = [ + (ws_lib.ABNF.OPCODE_CLOSE, b""), + ] + + conn = self._mock_conn_with_token() + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) + + mock_ws.close.assert_called_once_with(status=ws_lib.STATUS_NORMAL) + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_skips_malformed_protobuf(self, mock_ws_module, mock_console, capsys): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + good_entry = self._make_entry_bytes(1, "good line", 1700000000, 1, 2) + + mock_ws.recv_data.side_effect = [ + (ws_lib.ABNF.OPCODE_BINARY, b"\xff\xff\xff"), # invalid protobuf + (ws_lib.ABNF.OPCODE_BINARY, good_entry), + (ws_lib.ABNF.OPCODE_CLOSE, b""), + ] + + conn = self._mock_conn_with_token() + 
stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) + + captured = capsys.readouterr() + # The malformed entry is skipped but the good entry still shows + assert "good line" in captured.out + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_responds_to_ping(self, mock_ws_module, mock_console): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + mock_ws.recv_data.side_effect = [ + (ws_lib.ABNF.OPCODE_PING, b"ping-data"), + (ws_lib.ABNF.OPCODE_CLOSE, b""), + ] + + conn = self._mock_conn_with_token() + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) + + mock_ws.pong.assert_called_once_with(b"ping-data") From 81c84746e0660dbb3f1bedaf1cc23dcf218473e8 Mon Sep 17 00:00:00 2001 From: Sandeep Kumta Vishnu Date: Sat, 21 Feb 2026 18:44:54 -0800 Subject: [PATCH 4/8] fix: address self-review findings for streamlit logs command Consolidate try/finally so ws.close() runs on connect failure, fix MAX_TAIL_LINES to match proto spec (1000 not 10000), add streaming flag to suppress misleading output on early errors, add missing tests/streamlit/__init__.py, and add 3 new tests for KeyboardInterrupt, connect failure, and send_binary verification (29 total). 
--- .../cli/_plugins/streamlit/commands.py | 2 +- .../cli/_plugins/streamlit/log_streaming.py | 17 +++-- tests/streamlit/__init__.py | 13 ++++ tests/streamlit/test_streamlit_logs.py | 75 +++++++++++++++++++ 4 files changed, 99 insertions(+), 8 deletions(-) create mode 100644 tests/streamlit/__init__.py diff --git a/src/snowflake/cli/_plugins/streamlit/commands.py b/src/snowflake/cli/_plugins/streamlit/commands.py index 7339adc967..2523df6217 100644 --- a/src/snowflake/cli/_plugins/streamlit/commands.py +++ b/src/snowflake/cli/_plugins/streamlit/commands.py @@ -224,7 +224,7 @@ def streamlit_logs( "--tail", "-n", min=0, - max=10000, + max=1000, # server-side buffer size limit (see logs_service.proto) help="Number of historical log lines to fetch. Use 0 for live logs only.", ), **options, diff --git a/src/snowflake/cli/_plugins/streamlit/log_streaming.py b/src/snowflake/cli/_plugins/streamlit/log_streaming.py index dc96b0ead5..2caf61ff5d 100644 --- a/src/snowflake/cli/_plugins/streamlit/log_streaming.py +++ b/src/snowflake/cli/_plugins/streamlit/log_streaming.py @@ -39,7 +39,7 @@ log = logging.getLogger(__name__) DEFAULT_TAIL_LINES = 100 -MAX_TAIL_LINES = 10000 +MAX_TAIL_LINES = 1000 # Timeout for each ws.recv_data() call — mirrors the Go client's 90-second # read deadline. When no log entry arrives within this window, we re-issue @@ -130,13 +130,14 @@ def stream_logs( header = [f'Authorization: Snowflake Token="{token_info.token}"'] ws = websocket.WebSocket() ws.timeout = _WS_RECV_TIMEOUT_SECONDS + streaming = False try: - ws.connect(ws_url, header=header, timeout=_HANDSHAKE_TIMEOUT_SECONDS) - except Exception as e: - raise ClickException(f"Failed to connect to log stream: {e}") from e + try: + ws.connect(ws_url, header=header, timeout=_HANDSHAKE_TIMEOUT_SECONDS) + except Exception as e: + raise ClickException(f"Failed to connect to log stream: {e}") from e - try: # 4. 
Send StreamLogsRequest ws.send_binary(encode_stream_logs_request(tail_lines)) log.debug("Sent StreamLogsRequest with tail_lines=%d", tail_lines) @@ -144,6 +145,7 @@ def stream_logs( cli_console.step(f"Streaming logs (tail={tail_lines}). Press Ctrl+C to stop.") sys.stdout.write("---\n") sys.stdout.flush() + streaming = True # 5. Read loop while True: @@ -183,5 +185,6 @@ def stream_logs( ws.close(status=websocket.STATUS_NORMAL) except Exception: pass - sys.stdout.write("\n--- Log streaming stopped.\n") - sys.stdout.flush() + if streaming: + sys.stdout.write("\n--- Log streaming stopped.\n") + sys.stdout.flush() diff --git a/tests/streamlit/__init__.py b/tests/streamlit/__init__.py new file mode 100644 index 0000000000..ada0a4e13d --- /dev/null +++ b/tests/streamlit/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/tests/streamlit/test_streamlit_logs.py b/tests/streamlit/test_streamlit_logs.py index 682554f0ca..f01c3e9b0f 100644 --- a/tests/streamlit/test_streamlit_logs.py +++ b/tests/streamlit/test_streamlit_logs.py @@ -456,3 +456,78 @@ def test_responds_to_ping(self, mock_ws_module, mock_console): stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) mock_ws.pong.assert_called_once_with(b"ping-data") + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_keyboard_interrupt_prints_stopped( + self, mock_ws_module, mock_console, capsys + ): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + mock_ws.recv_data.side_effect = KeyboardInterrupt() + + conn = self._mock_conn_with_token() + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) + + captured = capsys.readouterr() + assert "Log streaming stopped" in captured.out + mock_ws.close.assert_called_once_with(status=ws_lib.STATUS_NORMAL) + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_connect_failure_raises(self, mock_ws_module, mock_console): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + mock_ws.connect.side_effect = ConnectionRefusedError("Connection refused") + + conn = self._mock_conn_with_token() + with pytest.raises(ClickException, match="Failed to connect"): + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) + + # WebSocket should 
still be closed in the finally block + mock_ws.close.assert_called_once_with(status=ws_lib.STATUS_NORMAL) + + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") + def test_sends_stream_logs_request(self, mock_ws_module, mock_console): + import websocket as ws_lib + + mock_ws = mock.Mock() + mock_ws_module.WebSocket.return_value = mock_ws + mock_ws_module.ABNF = ws_lib.ABNF + mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException + mock_ws_module.WebSocketConnectionClosedException = ( + ws_lib.WebSocketConnectionClosedException + ) + mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + + mock_ws.recv_data.side_effect = [ + (ws_lib.ABNF.OPCODE_CLOSE, b""), + ] + + conn = self._mock_conn_with_token() + stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=42) + + mock_ws.send_binary.assert_called_once() + sent_bytes = mock_ws.send_binary.call_args[0][0] + + # Verify the sent bytes decode to a StreamLogsRequest with tail_lines=42 + from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( + logs_service_pb2 as pb2, + ) + + request = pb2.StreamLogsRequest() + request.ParseFromString(sent_bytes) + assert request.tail_lines == 42 From 7261f57d3b0cab9e814a2cb18113a464a7a4a424 Mon Sep 17 00:00:00 2001 From: Sandeep Kumta Vishnu Date: Sun, 22 Feb 2026 12:24:00 -0800 Subject: [PATCH 5/8] feat: add --name flag and SPCSv2 runtime validation to streamlit logs Add --name option to `snow streamlit logs` so users can specify a Streamlit app by fully qualified name without requiring snowflake.yml. When --name is provided, the command validates SPCSv2 runtime via server-side DESCRIBE STREAMLIT. When using project definition, it validates from entity_model.runtime_name locally. 
--- .../cli/_plugins/streamlit/commands.py | 68 ++++++++++++---- .../cli/_plugins/streamlit/log_streaming.py | 37 +++++++++ tests/streamlit/test_streamlit_logs.py | 81 +++++++++++++++++++ 3 files changed, 169 insertions(+), 17 deletions(-) diff --git a/src/snowflake/cli/_plugins/streamlit/commands.py b/src/snowflake/cli/_plugins/streamlit/commands.py index 2523df6217..64ce96fa48 100644 --- a/src/snowflake/cli/_plugins/streamlit/commands.py +++ b/src/snowflake/cli/_plugins/streamlit/commands.py @@ -33,6 +33,7 @@ with_project_definition, ) from snowflake.cli.api.commands.flags import ( + IdentifierType, PruneOption, ReplaceOption, entity_argument, @@ -216,9 +217,16 @@ def get_url( @app.command("logs", requires_connection=True) -@with_project_definition() +@with_project_definition(is_optional=True) def streamlit_logs( entity_id: str = entity_argument("streamlit"), + name: FQN = typer.Option( + None, + "--name", + help="Fully qualified name of the Streamlit app (e.g. my_app, schema.my_app, or db.schema.my_app). " + "Overrides the project definition when provided.", + click_type=IdentifierType(), + ), tail: int = typer.Option( 100, "--tail", @@ -232,33 +240,59 @@ def streamlit_logs( """ Streams live logs from a deployed Streamlit app to your terminal. - Reads the Streamlit app name from the project definition file (snowflake.yml). - Connects to the app's developer log service via WebSocket and prints - log entries in real time. Press Ctrl+C to stop streaming. + Reads the Streamlit app name from the project definition file (snowflake.yml) + or from the --name option. Connects to the app's developer log service via + WebSocket and prints log entries in real time. Press Ctrl+C to stop streaming. + + Log streaming requires SPCSv2 runtime. 
""" from snowflake.cli._plugins.streamlit.log_streaming import ( stream_logs, + validate_spcs_v2_runtime, + ) + from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( + SPCS_RUNTIME_V2_NAME, ) cli_context = get_cli_context() conn = cli_context.connection - pd = cli_context.project_definition - if not pd.meets_version_requirement("2"): - if not pd.streamlit: - raise NoProjectDefinitionError( - project_type="streamlit", project_root=cli_context.project_root + if name is not None: + # --name flag provided: resolve FQN and validate via server-side DESCRIBE + fqn = name.using_connection(conn) + validate_spcs_v2_runtime(conn, str(fqn)) + else: + # No --name: require project definition + pd = cli_context.project_definition + if pd is None: + raise ClickException( + "No Streamlit app specified. Provide --name or run from a " + "directory with a snowflake.yml project definition." ) - pd = convert_project_definition_to_v2(cli_context.project_root, pd) + if not pd.meets_version_requirement("2"): + if not pd.streamlit: + raise NoProjectDefinitionError( + project_type="streamlit", project_root=cli_context.project_root + ) + pd = convert_project_definition_to_v2(cli_context.project_root, pd) + + entity_model = get_entity_for_operation( + cli_context=cli_context, + entity_id=entity_id, + project_definition=pd, + entity_type=ObjectType.STREAMLIT.value.cli_name, + ) - entity_model = get_entity_for_operation( - cli_context=cli_context, - entity_id=entity_id, - project_definition=pd, - entity_type=ObjectType.STREAMLIT.value.cli_name, - ) + # Validate SPCSv2 runtime from entity model + if entity_model.runtime_name != SPCS_RUNTIME_V2_NAME: + raise ClickException( + f"Log streaming is only supported for Streamlit apps running on " + f"SPCSv2 runtime ({SPCS_RUNTIME_V2_NAME}). " + f"Entity '{entity_id or entity_model.fqn}' has " + f"runtime_name='{entity_model.runtime_name}'." 
+ ) - fqn = entity_model.fqn.using_connection(conn) + fqn = entity_model.fqn.using_connection(conn) stream_logs( conn=conn, diff --git a/src/snowflake/cli/_plugins/streamlit/log_streaming.py b/src/snowflake/cli/_plugins/streamlit/log_streaming.py index 2caf61ff5d..1e6ad8c0ac 100644 --- a/src/snowflake/cli/_plugins/streamlit/log_streaming.py +++ b/src/snowflake/cli/_plugins/streamlit/log_streaming.py @@ -105,6 +105,43 @@ def build_ws_url(resource_uri: str) -> str: return ws_url.rstrip("/") + "/logs" +def validate_spcs_v2_runtime(conn: SnowflakeConnection, fqn: str) -> None: + """ + Run DESCRIBE STREAMLIT and verify the app uses SPCSv2 runtime. + + Raises ClickException if the app does not use the SPCS Runtime V2 + (required for log streaming). + """ + from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( + SPCS_RUNTIME_V2_NAME, + ) + + cursor = conn.cursor() + try: + cursor.execute(f"DESCRIBE STREAMLIT {fqn}") + row = cursor.fetchone() + description = cursor.description + finally: + cursor.close() + + if not row or not description: + raise ClickException( + f"Could not describe Streamlit app {fqn}. " + "Verify the app exists and you have access." + ) + + # Build column-name -> value mapping from cursor.description + columns = {desc[0].lower(): val for desc, val in zip(description, row)} + runtime_name = columns.get("runtime_name") + + if runtime_name != SPCS_RUNTIME_V2_NAME: + raise ClickException( + f"Log streaming is only supported for Streamlit apps running on " + f"SPCSv2 runtime ({SPCS_RUNTIME_V2_NAME}). " + f"App '{fqn}' has runtime_name='{runtime_name}'." 
+ ) + + def stream_logs( conn: SnowflakeConnection, fqn: str, diff --git a/tests/streamlit/test_streamlit_logs.py b/tests/streamlit/test_streamlit_logs.py index f01c3e9b0f..c4514b7570 100644 --- a/tests/streamlit/test_streamlit_logs.py +++ b/tests/streamlit/test_streamlit_logs.py @@ -22,6 +22,7 @@ build_ws_url, get_developer_api_token, stream_logs, + validate_spcs_v2_runtime, ) from snowflake.cli._plugins.streamlit.proto_codec import ( LOG_LEVEL_INFO, @@ -531,3 +532,83 @@ def test_sends_stream_logs_request(self, mock_ws_module, mock_console): request = pb2.StreamLogsRequest() request.ParseFromString(sent_bytes) assert request.tail_lines == 42 + + +class TestValidateSpcsV2Runtime: + SPCS_V2 = "SYSTEM$ST_CONTAINER_RUNTIME_PY3_11" + + def _mock_describe_cursor(self, runtime_name): + """Return a mock cursor whose DESCRIBE STREAMLIT result has the given runtime_name.""" + mock_cursor = mock.Mock() + # Simulate DESCRIBE STREAMLIT columns (subset relevant to our code) + mock_cursor.description = [ + ("title",), + ("main_file",), + ("query_warehouse",), + ("compute_pool",), + ("runtime_name",), + ("name",), + ] + mock_cursor.fetchone.return_value = ( + "My App", + "streamlit_app.py", + "WH", + "my_pool", + runtime_name, + "MY_APP", + ) + return mock_cursor + + def test_passes_for_spcs_v2_runtime(self): + mock_cursor = self._mock_describe_cursor(self.SPCS_V2) + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + # Should not raise + validate_spcs_v2_runtime(mock_conn, "DB.SCHEMA.MY_APP") + + mock_cursor.execute.assert_called_once_with( + "DESCRIBE STREAMLIT DB.SCHEMA.MY_APP" + ) + mock_cursor.close.assert_called_once() + + def test_raises_for_non_spcs_v2_runtime(self): + mock_cursor = self._mock_describe_cursor(None) + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + with pytest.raises(ClickException, match="only supported for Streamlit apps"): + validate_spcs_v2_runtime(mock_conn, "DB.SCHEMA.MY_APP") + + 
mock_cursor.close.assert_called_once() + + def test_raises_for_wrong_runtime_name(self): + mock_cursor = self._mock_describe_cursor("SOME_OTHER_RUNTIME") + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + with pytest.raises(ClickException, match="SOME_OTHER_RUNTIME"): + validate_spcs_v2_runtime(mock_conn, "DB.SCHEMA.MY_APP") + + def test_raises_for_empty_describe_result(self): + mock_cursor = mock.Mock() + mock_cursor.fetchone.return_value = None + mock_cursor.description = None + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + with pytest.raises(ClickException, match="Could not describe"): + validate_spcs_v2_runtime(mock_conn, "DB.SCHEMA.MY_APP") + + mock_cursor.close.assert_called_once() + + def test_cursor_closed_on_sql_error(self): + mock_cursor = mock.Mock() + mock_cursor.execute.side_effect = Exception("SQL error") + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + + with pytest.raises(Exception, match="SQL error"): + validate_spcs_v2_runtime(mock_conn, "DB.SCHEMA.MY_APP") + + mock_cursor.close.assert_called_once() From c6ee3a781124ddf29e7f98b117a4522692893e37 Mon Sep 17 00:00:00 2001 From: Sandeep Kumta Vishnu Date: Sun, 22 Feb 2026 13:49:52 -0800 Subject: [PATCH 6/8] fix: address code review feedback for streamlit logs command MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Narrow broad except-Exception in recv loop to WebSocketException/OSError. Remove dead DEFAULT_TAIL_LINES/MAX_TAIL_LINES constants. Deduplicate SPCSv2 validation — both --name and project-definition paths now call validate_spcs_v2_runtime(). Error when both --name and entity_id are provided. Add command-level tests for streamlit_logs(). Log ws.close() errors at debug level instead of silencing. Extract WS mock boilerplate to pytest fixtures. Type to_dict return as dict[str, str | int]. Add safety comments for FQN validation and token header logging. 
--- .../cli/_plugins/streamlit/commands.py | 20 +- .../cli/_plugins/streamlit/log_streaming.py | 16 +- .../cli/_plugins/streamlit/proto_codec.py | 2 +- tests/streamlit/test_streamlit_logs.py | 343 +++++++++--------- 4 files changed, 195 insertions(+), 186 deletions(-) diff --git a/src/snowflake/cli/_plugins/streamlit/commands.py b/src/snowflake/cli/_plugins/streamlit/commands.py index 64ce96fa48..0367dba3dd 100644 --- a/src/snowflake/cli/_plugins/streamlit/commands.py +++ b/src/snowflake/cli/_plugins/streamlit/commands.py @@ -250,14 +250,17 @@ def streamlit_logs( stream_logs, validate_spcs_v2_runtime, ) - from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( - SPCS_RUNTIME_V2_NAME, - ) cli_context = get_cli_context() conn = cli_context.connection if name is not None: + if entity_id is not None: + raise ClickException( + "Cannot specify both --name and an entity ID. " + "Use --name to identify the app directly, or use an " + "entity ID to reference a snowflake.yml definition." + ) # --name flag provided: resolve FQN and validate via server-side DESCRIBE fqn = name.using_connection(conn) validate_spcs_v2_runtime(conn, str(fqn)) @@ -283,16 +286,9 @@ def streamlit_logs( entity_type=ObjectType.STREAMLIT.value.cli_name, ) - # Validate SPCSv2 runtime from entity model - if entity_model.runtime_name != SPCS_RUNTIME_V2_NAME: - raise ClickException( - f"Log streaming is only supported for Streamlit apps running on " - f"SPCSv2 runtime ({SPCS_RUNTIME_V2_NAME}). " - f"Entity '{entity_id or entity_model.fqn}' has " - f"runtime_name='{entity_model.runtime_name}'." 
- ) - fqn = entity_model.fqn.using_connection(conn) + # Validate SPCSv2 runtime via server-side DESCRIBE (same path as --name) + validate_spcs_v2_runtime(conn, str(fqn)) stream_logs( conn=conn, diff --git a/src/snowflake/cli/_plugins/streamlit/log_streaming.py b/src/snowflake/cli/_plugins/streamlit/log_streaming.py index 1e6ad8c0ac..8caba10567 100644 --- a/src/snowflake/cli/_plugins/streamlit/log_streaming.py +++ b/src/snowflake/cli/_plugins/streamlit/log_streaming.py @@ -38,9 +38,6 @@ log = logging.getLogger(__name__) -DEFAULT_TAIL_LINES = 100 -MAX_TAIL_LINES = 1000 - # Timeout for each ws.recv_data() call — mirrors the Go client's 90-second # read deadline. When no log entry arrives within this window, we re-issue # recv_data() so the loop stays responsive to KeyboardInterrupt. @@ -118,6 +115,9 @@ def validate_spcs_v2_runtime(conn: SnowflakeConnection, fqn: str) -> None: cursor = conn.cursor() try: + # fqn is already validated by IdentifierType / FQN.using_connection — + # DESCRIBE uses identifier syntax, not string literals, so no + # single-quote injection risk. cursor.execute(f"DESCRIBE STREAMLIT {fqn}") row = cursor.fetchone() description = cursor.description @@ -145,7 +145,7 @@ def validate_spcs_v2_runtime(conn: SnowflakeConnection, fqn: str) -> None: def stream_logs( conn: SnowflakeConnection, fqn: str, - tail_lines: int = DEFAULT_TAIL_LINES, + tail_lines: int = 100, json_output: bool = False, ) -> None: """ @@ -164,6 +164,8 @@ def stream_logs( cli_console.step(f"Connecting to log stream: {ws_url}") # 3. Connect + # NOTE: Do not log `header` — it contains the auth token. Also be aware + # that websocket.enableTrace(True) will dump headers to stderr. 
header = [f'Authorization: Snowflake Token="{token_info.token}"'] ws = websocket.WebSocket() ws.timeout = _WS_RECV_TIMEOUT_SECONDS @@ -195,7 +197,7 @@ def stream_logs( except websocket.WebSocketConnectionClosedException: log.debug("WebSocket connection closed by server") break - except Exception as e: + except (websocket.WebSocketException, OSError) as e: log.debug("WebSocket recv error: %s", e) break @@ -220,8 +222,8 @@ def stream_logs( finally: try: ws.close(status=websocket.STATUS_NORMAL) - except Exception: - pass + except Exception as e: + log.debug("Error closing WebSocket: %s", e) if streaming: sys.stdout.write("\n--- Log streaming stopped.\n") sys.stdout.flush() diff --git a/src/snowflake/cli/_plugins/streamlit/proto_codec.py b/src/snowflake/cli/_plugins/streamlit/proto_codec.py index 95e22196ca..e930b42cb6 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto_codec.py +++ b/src/snowflake/cli/_plugins/streamlit/proto_codec.py @@ -74,7 +74,7 @@ def format_line(self) -> str: ts = self.timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] return f"[{ts}] [{self.level_label}] [{self.source_label}] [seq:{self.sequence}] {self.content}" - def to_dict(self) -> dict: + def to_dict(self) -> dict[str, str | int]: return { "timestamp": self.timestamp.isoformat(), "level": self.level_label, diff --git a/tests/streamlit/test_streamlit_logs.py b/tests/streamlit/test_streamlit_logs.py index c4514b7570..cd34a2e525 100644 --- a/tests/streamlit/test_streamlit_logs.py +++ b/tests/streamlit/test_streamlit_logs.py @@ -16,6 +16,7 @@ from unittest import mock import pytest +import websocket as ws_lib from click import ClickException from snowflake.cli._plugins.streamlit.log_streaming import ( DeveloperApiToken, @@ -240,44 +241,60 @@ def test_to_dict(self): } -class TestStreamLogs: - def _make_entry_bytes(self, log_source, content, seconds, sequence, level): - from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( - logs_service_pb2 as pb2, - ) - - entry = 
pb2.LogEntry( - log_source=log_source, content=content, sequence=sequence, level=level - ) - entry.timestamp.seconds = seconds - return entry.SerializeToString() - - def _mock_conn_with_token(self): - """Return a mock connection that returns a valid token response.""" - mock_cursor = mock.Mock() - mock_cursor.fetchone.return_value = ( - '{"token": "test-token", "resourceUri": "https://test.snowflakecomputing.com/api"}', - ) - mock_conn = mock.Mock() - mock_conn.cursor.return_value = mock_cursor - return mock_conn - - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_streams_log_entries_to_stdout(self, mock_ws_module, mock_console, capsys): - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws +def _make_entry_bytes(log_source, content, seconds, sequence, level): + """Serialize a protobuf LogEntry for use in tests.""" + from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( + logs_service_pb2 as pb2, + ) + + entry = pb2.LogEntry( + log_source=log_source, content=content, sequence=sequence, level=level + ) + entry.timestamp.seconds = seconds + return entry.SerializeToString() + + +def _mock_conn_with_token(): + """Return a mock connection that returns a valid token response.""" + mock_cursor = mock.Mock() + mock_cursor.fetchone.return_value = ( + '{"token": "test-token", "resourceUri": "https://test.snowflakecomputing.com/api"}', + ) + mock_conn = mock.Mock() + mock_conn.cursor.return_value = mock_cursor + return mock_conn + + +@pytest.fixture +def mock_ws(): + """Patch the websocket module in log_streaming and wire up real constants.""" + with mock.patch( + "snowflake.cli._plugins.streamlit.log_streaming.websocket" + ) as mock_ws_module: + ws = mock.Mock() + mock_ws_module.WebSocket.return_value = ws mock_ws_module.ABNF = ws_lib.ABNF mock_ws_module.WebSocketTimeoutException = 
ws_lib.WebSocketTimeoutException mock_ws_module.WebSocketConnectionClosedException = ( ws_lib.WebSocketConnectionClosedException ) + mock_ws_module.WebSocketException = ws_lib.WebSocketException mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + yield ws + + +@pytest.fixture +def mock_console(): + with mock.patch( + "snowflake.cli._plugins.streamlit.log_streaming.cli_console" + ) as console: + yield console - entry1 = self._make_entry_bytes(1, "line one", 1700000000, 1, 2) - entry2 = self._make_entry_bytes(2, "line two", 1700000001, 2, 3) + +class TestStreamLogs: + def test_streams_log_entries_to_stdout(self, mock_ws, mock_console, capsys): + entry1 = _make_entry_bytes(1, "line one", 1700000000, 1, 2) + entry2 = _make_entry_bytes(2, "line two", 1700000001, 2, 3) mock_ws.recv_data.side_effect = [ (ws_lib.ABNF.OPCODE_BINARY, entry1), @@ -285,7 +302,7 @@ def test_streams_log_entries_to_stdout(self, mock_ws_module, mock_console, capsy (ws_lib.ABNF.OPCODE_CLOSE, b""), ] - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) captured = capsys.readouterr() @@ -294,30 +311,17 @@ def test_streams_log_entries_to_stdout(self, mock_ws_module, mock_console, capsy assert "[APP]" in captured.out assert "[MGR]" in captured.out - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_json_output(self, mock_ws_module, mock_console, capsys): + def test_json_output(self, mock_ws, mock_console, capsys): import json - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.ABNF = ws_lib.ABNF - mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException - mock_ws_module.WebSocketConnectionClosedException = ( - ws_lib.WebSocketConnectionClosedException - ) - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL - - entry_bytes = 
self._make_entry_bytes(1, "json test", 1700000000, 1, 2) + entry_bytes = _make_entry_bytes(1, "json test", 1700000000, 1, 2) mock_ws.recv_data.side_effect = [ (ws_lib.ABNF.OPCODE_BINARY, entry_bytes), (ws_lib.ABNF.OPCODE_CLOSE, b""), ] - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=50, json_output=True) captured = capsys.readouterr() @@ -331,43 +335,17 @@ def test_json_output(self, mock_ws_module, mock_console, capsys): assert parsed["source"] == "APP" assert parsed["level"] == "INFO" - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_handles_connection_closed(self, mock_ws_module, mock_console, capsys): - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.ABNF = ws_lib.ABNF - mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException - mock_ws_module.WebSocketConnectionClosedException = ( - ws_lib.WebSocketConnectionClosedException - ) - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL - + def test_handles_connection_closed(self, mock_ws, mock_console, capsys): mock_ws.recv_data.side_effect = ws_lib.WebSocketConnectionClosedException() - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) captured = capsys.readouterr() assert "Log streaming stopped" in captured.out - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_timeout_continues_loop(self, mock_ws_module, mock_console, capsys): - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.ABNF = ws_lib.ABNF - mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException - 
mock_ws_module.WebSocketConnectionClosedException = ( - ws_lib.WebSocketConnectionClosedException - ) - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL - - entry_bytes = self._make_entry_bytes(1, "after timeout", 1700000000, 1, 2) + def test_timeout_continues_loop(self, mock_ws, mock_console, capsys): + entry_bytes = _make_entry_bytes(1, "after timeout", 1700000000, 1, 2) # Timeout once, then get a message, then close mock_ws.recv_data.side_effect = [ @@ -376,50 +354,24 @@ def test_timeout_continues_loop(self, mock_ws_module, mock_console, capsys): (ws_lib.ABNF.OPCODE_CLOSE, b""), ] - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) captured = capsys.readouterr() assert "after timeout" in captured.out - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_graceful_close(self, mock_ws_module, mock_console): - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.ABNF = ws_lib.ABNF - mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException - mock_ws_module.WebSocketConnectionClosedException = ( - ws_lib.WebSocketConnectionClosedException - ) - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL - + def test_graceful_close(self, mock_ws, mock_console): mock_ws.recv_data.side_effect = [ (ws_lib.ABNF.OPCODE_CLOSE, b""), ] - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) mock_ws.close.assert_called_once_with(status=ws_lib.STATUS_NORMAL) - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_skips_malformed_protobuf(self, mock_ws_module, mock_console, capsys): - import websocket as ws_lib - - mock_ws = mock.Mock() - 
mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.ABNF = ws_lib.ABNF - mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException - mock_ws_module.WebSocketConnectionClosedException = ( - ws_lib.WebSocketConnectionClosedException - ) - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL - - good_entry = self._make_entry_bytes(1, "good line", 1700000000, 1, 2) + def test_skips_malformed_protobuf(self, mock_ws, mock_console, capsys): + good_entry = _make_entry_bytes(1, "good line", 1700000000, 1, 2) mock_ws.recv_data.side_effect = [ (ws_lib.ABNF.OPCODE_BINARY, b"\xff\xff\xff"), # invalid protobuf @@ -427,98 +379,50 @@ def test_skips_malformed_protobuf(self, mock_ws_module, mock_console, capsys): (ws_lib.ABNF.OPCODE_CLOSE, b""), ] - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) captured = capsys.readouterr() # The malformed entry is skipped but the good entry still shows assert "good line" in captured.out - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_responds_to_ping(self, mock_ws_module, mock_console): - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.ABNF = ws_lib.ABNF - mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException - mock_ws_module.WebSocketConnectionClosedException = ( - ws_lib.WebSocketConnectionClosedException - ) - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL - + def test_responds_to_ping(self, mock_ws, mock_console): mock_ws.recv_data.side_effect = [ (ws_lib.ABNF.OPCODE_PING, b"ping-data"), (ws_lib.ABNF.OPCODE_CLOSE, b""), ] - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) mock_ws.pong.assert_called_once_with(b"ping-data") - 
@mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_keyboard_interrupt_prints_stopped( - self, mock_ws_module, mock_console, capsys - ): - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.ABNF = ws_lib.ABNF - mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException - mock_ws_module.WebSocketConnectionClosedException = ( - ws_lib.WebSocketConnectionClosedException - ) - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL - + def test_keyboard_interrupt_prints_stopped(self, mock_ws, mock_console, capsys): mock_ws.recv_data.side_effect = KeyboardInterrupt() - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) captured = capsys.readouterr() assert "Log streaming stopped" in captured.out mock_ws.close.assert_called_once_with(status=ws_lib.STATUS_NORMAL) - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_connect_failure_raises(self, mock_ws_module, mock_console): - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL + def test_connect_failure_raises(self, mock_ws, mock_console): mock_ws.connect.side_effect = ConnectionRefusedError("Connection refused") - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() with pytest.raises(ClickException, match="Failed to connect"): stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=100) # WebSocket should still be closed in the finally block mock_ws.close.assert_called_once_with(status=ws_lib.STATUS_NORMAL) - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.cli_console") - 
@mock.patch("snowflake.cli._plugins.streamlit.log_streaming.websocket") - def test_sends_stream_logs_request(self, mock_ws_module, mock_console): - import websocket as ws_lib - - mock_ws = mock.Mock() - mock_ws_module.WebSocket.return_value = mock_ws - mock_ws_module.ABNF = ws_lib.ABNF - mock_ws_module.WebSocketTimeoutException = ws_lib.WebSocketTimeoutException - mock_ws_module.WebSocketConnectionClosedException = ( - ws_lib.WebSocketConnectionClosedException - ) - mock_ws_module.STATUS_NORMAL = ws_lib.STATUS_NORMAL - + def test_sends_stream_logs_request(self, mock_ws, mock_console): mock_ws.recv_data.side_effect = [ (ws_lib.ABNF.OPCODE_CLOSE, b""), ] - conn = self._mock_conn_with_token() + conn = _mock_conn_with_token() stream_logs(conn=conn, fqn="DB.SCHEMA.APP", tail_lines=42) mock_ws.send_binary.assert_called_once() @@ -612,3 +516,110 @@ def test_cursor_closed_on_sql_error(self): validate_spcs_v2_runtime(mock_conn, "DB.SCHEMA.MY_APP") mock_cursor.close.assert_called_once() + + +SPCS_V2_NAME = "SYSTEM$ST_CONTAINER_RUNTIME_PY3_11" + + +class TestStreamlitLogsCommand: + """Tests for the streamlit_logs command handler in commands.py.""" + + @mock.patch("snowflake.cli._plugins.streamlit.commands.get_cli_context") + @mock.patch( + "snowflake.cli._plugins.streamlit.log_streaming.validate_spcs_v2_runtime" + ) + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.stream_logs") + def test_name_flag_resolves_fqn_and_validates( + self, mock_stream_logs, mock_validate, mock_get_ctx + ): + """When --name is provided, resolve FQN and validate via DESCRIBE.""" + from snowflake.cli._plugins.streamlit.commands import streamlit_logs + from snowflake.cli.api.identifiers import FQN + + mock_conn = mock.Mock() + mock_conn.database = "DB" + mock_conn.schema = "SCHEMA" + + mock_ctx = mock.Mock() + mock_ctx.connection = mock_conn + mock_ctx.output_format.is_json = False + mock_get_ctx.return_value = mock_ctx + + fqn = FQN.from_string("MY_APP") + resolved = 
fqn.using_connection(mock_conn) + + result = streamlit_logs(entity_id=None, name=fqn, tail=100) + + mock_validate.assert_called_once_with(mock_conn, str(resolved)) + mock_stream_logs.assert_called_once_with( + conn=mock_conn, + fqn=str(resolved), + tail_lines=100, + json_output=False, + ) + assert result.message == "Log streaming ended." + + @mock.patch("snowflake.cli._plugins.streamlit.commands.get_cli_context") + def test_name_and_entity_id_raises(self, mock_get_ctx): + """When both --name and entity_id are provided, raise an error.""" + from snowflake.cli._plugins.streamlit.commands import streamlit_logs + from snowflake.cli.api.identifiers import FQN + + mock_ctx = mock.Mock() + mock_ctx.connection = mock.Mock() + mock_get_ctx.return_value = mock_ctx + + with pytest.raises(ClickException, match="Cannot specify both"): + streamlit_logs( + entity_id="my_entity", name=FQN.from_string("MY_APP"), tail=100 + ) + + @mock.patch("snowflake.cli._plugins.streamlit.commands.get_cli_context") + def test_no_name_no_project_definition_raises(self, mock_get_ctx): + """When neither --name nor project definition is available, raise an error.""" + from snowflake.cli._plugins.streamlit.commands import streamlit_logs + + mock_ctx = mock.Mock() + mock_ctx.connection = mock.Mock() + mock_ctx.project_definition = None + mock_get_ctx.return_value = mock_ctx + + with pytest.raises(ClickException, match="No Streamlit app specified"): + streamlit_logs(entity_id=None, name=None, tail=100) + + @mock.patch("snowflake.cli._plugins.streamlit.commands.get_cli_context") + @mock.patch("snowflake.cli._plugins.streamlit.commands.get_entity_for_operation") + @mock.patch( + "snowflake.cli._plugins.streamlit.log_streaming.validate_spcs_v2_runtime" + ) + @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.stream_logs") + def test_project_definition_path( + self, mock_stream_logs, mock_validate, mock_get_entity, mock_get_ctx + ): + """When using project definition, resolve entity and validate 
via DESCRIBE.""" + from snowflake.cli._plugins.streamlit.commands import streamlit_logs + from snowflake.cli.api.identifiers import FQN + + mock_conn = mock.Mock() + mock_conn.database = "DB" + mock_conn.schema = "PUBLIC" + + mock_pd = mock.Mock() + mock_pd.meets_version_requirement.return_value = True + + mock_ctx = mock.Mock() + mock_ctx.connection = mock_conn + mock_ctx.project_definition = mock_pd + mock_ctx.output_format.is_json = False + mock_get_ctx.return_value = mock_ctx + + mock_entity = mock.Mock() + mock_entity.fqn = FQN.from_string("DB.PUBLIC.MY_STREAMLIT") + mock_get_entity.return_value = mock_entity + + result = streamlit_logs(entity_id=None, name=None, tail=50) + + mock_validate.assert_called_once() + mock_stream_logs.assert_called_once() + assert mock_stream_logs.call_args.kwargs["tail_lines"] == 50 + assert result.message == "Log streaming ended." From 31942eef74f78de153c5caf52327fb5a0b60dca4 Mon Sep 17 00:00:00 2001 From: Sandeep Kumta Vishnu Date: Sun, 22 Feb 2026 14:56:39 -0800 Subject: [PATCH 7/8] chore: move local imports to top level and update copyright to 2026 Move all local imports to module-level in log_streaming.py, commands.py, and test_streamlit_logs.py. Update mock patch targets in command-level tests to match. Update copyright year from 2024 to 2026 in all new files added by this branch. 
--- .../cli/_plugins/streamlit/commands.py | 11 ++--- .../cli/_plugins/streamlit/log_streaming.py | 9 ++-- .../cli/_plugins/streamlit/proto/__init__.py | 2 +- .../streamlit/proto/generated/__init__.py | 2 +- .../proto/generated/developer/__init__.py | 2 +- .../proto/generated/developer/v1/__init__.py | 2 +- .../cli/_plugins/streamlit/proto_codec.py | 2 +- tests/streamlit/__init__.py | 2 +- tests/streamlit/test_streamlit_logs.py | 49 +++++-------------- 9 files changed, 26 insertions(+), 55 deletions(-) diff --git a/src/snowflake/cli/_plugins/streamlit/commands.py b/src/snowflake/cli/_plugins/streamlit/commands.py index 0367dba3dd..731a8d2749 100644 --- a/src/snowflake/cli/_plugins/streamlit/commands.py +++ b/src/snowflake/cli/_plugins/streamlit/commands.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. +# Copyright (c) 2026 Snowflake Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -24,6 +24,10 @@ add_object_command_aliases, scope_option, ) +from snowflake.cli._plugins.streamlit.log_streaming import ( + stream_logs, + validate_spcs_v2_runtime, +) from snowflake.cli._plugins.streamlit.manager import StreamlitManager from snowflake.cli._plugins.streamlit.streamlit_entity import StreamlitEntity from snowflake.cli._plugins.workspace.context import ActionContext, WorkspaceContext @@ -246,11 +250,6 @@ def streamlit_logs( Log streaming requires SPCSv2 runtime. """ - from snowflake.cli._plugins.streamlit.log_streaming import ( - stream_logs, - validate_spcs_v2_runtime, - ) - cli_context = get_cli_context() conn = cli_context.connection diff --git a/src/snowflake/cli/_plugins/streamlit/log_streaming.py b/src/snowflake/cli/_plugins/streamlit/log_streaming.py index 8caba10567..c9b290c294 100644 --- a/src/snowflake/cli/_plugins/streamlit/log_streaming.py +++ b/src/snowflake/cli/_plugins/streamlit/log_streaming.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. 
+# Copyright (c) 2026 Snowflake Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,6 +33,9 @@ decode_log_entry, encode_stream_logs_request, ) +from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( + SPCS_RUNTIME_V2_NAME, +) from snowflake.cli.api.console import cli_console from snowflake.connector import SnowflakeConnection @@ -109,10 +112,6 @@ def validate_spcs_v2_runtime(conn: SnowflakeConnection, fqn: str) -> None: Raises ClickException if the app does not use the SPCS Runtime V2 (required for log streaming). """ - from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( - SPCS_RUNTIME_V2_NAME, - ) - cursor = conn.cursor() try: # fqn is already validated by IdentifierType / FQN.using_connection — diff --git a/src/snowflake/cli/_plugins/streamlit/proto/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/__init__.py index ada0a4e13d..74bcb8a780 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/__init__.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. +# Copyright (c) 2026 Snowflake Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py index ada0a4e13d..74bcb8a780 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. +# Copyright (c) 2026 Snowflake Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py index ada0a4e13d..74bcb8a780 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. +# Copyright (c) 2026 Snowflake Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py index ada0a4e13d..74bcb8a780 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py +++ b/src/snowflake/cli/_plugins/streamlit/proto/generated/developer/v1/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. +# Copyright (c) 2026 Snowflake Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/snowflake/cli/_plugins/streamlit/proto_codec.py b/src/snowflake/cli/_plugins/streamlit/proto_codec.py index e930b42cb6..4d5f5482bf 100644 --- a/src/snowflake/cli/_plugins/streamlit/proto_codec.py +++ b/src/snowflake/cli/_plugins/streamlit/proto_codec.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. +# Copyright (c) 2026 Snowflake Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/streamlit/__init__.py b/tests/streamlit/__init__.py index ada0a4e13d..74bcb8a780 100644 --- a/tests/streamlit/__init__.py +++ b/tests/streamlit/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. +# Copyright (c) 2026 Snowflake Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/streamlit/test_streamlit_logs.py b/tests/streamlit/test_streamlit_logs.py index cd34a2e525..2b77c6066e 100644 --- a/tests/streamlit/test_streamlit_logs.py +++ b/tests/streamlit/test_streamlit_logs.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Snowflake Inc. +# Copyright (c) 2026 Snowflake Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json from datetime import datetime, timezone from unittest import mock import pytest import websocket as ws_lib from click import ClickException +from snowflake.cli._plugins.streamlit.commands import streamlit_logs from snowflake.cli._plugins.streamlit.log_streaming import ( DeveloperApiToken, build_ws_url, @@ -25,6 +27,9 @@ stream_logs, validate_spcs_v2_runtime, ) +from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( + logs_service_pb2 as pb2, +) from snowflake.cli._plugins.streamlit.proto_codec import ( LOG_LEVEL_INFO, LOG_LEVEL_WARN, @@ -34,6 +39,7 @@ decode_log_entry, encode_stream_logs_request, ) +from snowflake.cli.api.identifiers import FQN class TestBuildWsUrl: @@ -141,10 +147,6 @@ def test_zero_tail_lines(self): def test_roundtrip_via_pb2(self): """Verify encoding matches what the protobuf library produces.""" - from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( - logs_service_pb2 as pb2, - ) - for tail_lines in [0, 1, 50, 100, 1000, 10000]: encoded = encode_stream_logs_request(tail_lines) decoded = pb2.StreamLogsRequest() @@ -154,10 +156,6 @@ def test_roundtrip_via_pb2(self): class TestDecodeLogEntry: def _make_pb2_log_entry(self, log_source, content, seconds, nanos, sequence, level): - from 
snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( - logs_service_pb2 as pb2, - ) - entry = pb2.LogEntry() entry.log_source = log_source entry.content = content @@ -243,10 +241,6 @@ def test_to_dict(self): def _make_entry_bytes(log_source, content, seconds, sequence, level): """Serialize a protobuf LogEntry for use in tests.""" - from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( - logs_service_pb2 as pb2, - ) - entry = pb2.LogEntry( log_source=log_source, content=content, sequence=sequence, level=level ) @@ -312,8 +306,6 @@ def test_streams_log_entries_to_stdout(self, mock_ws, mock_console, capsys): assert "[MGR]" in captured.out def test_json_output(self, mock_ws, mock_console, capsys): - import json - entry_bytes = _make_entry_bytes(1, "json test", 1700000000, 1, 2) mock_ws.recv_data.side_effect = [ @@ -429,10 +421,6 @@ def test_sends_stream_logs_request(self, mock_ws, mock_console): sent_bytes = mock_ws.send_binary.call_args[0][0] # Verify the sent bytes decode to a StreamLogsRequest with tail_lines=42 - from snowflake.cli._plugins.streamlit.proto.generated.developer.v1 import ( - logs_service_pb2 as pb2, - ) - request = pb2.StreamLogsRequest() request.ParseFromString(sent_bytes) assert request.tail_lines == 42 @@ -525,17 +513,12 @@ class TestStreamlitLogsCommand: """Tests for the streamlit_logs command handler in commands.py.""" @mock.patch("snowflake.cli._plugins.streamlit.commands.get_cli_context") - @mock.patch( - "snowflake.cli._plugins.streamlit.log_streaming.validate_spcs_v2_runtime" - ) - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.stream_logs") + @mock.patch("snowflake.cli._plugins.streamlit.commands.validate_spcs_v2_runtime") + @mock.patch("snowflake.cli._plugins.streamlit.commands.stream_logs") def test_name_flag_resolves_fqn_and_validates( self, mock_stream_logs, mock_validate, mock_get_ctx ): """When --name is provided, resolve FQN and validate via DESCRIBE.""" - from 
snowflake.cli._plugins.streamlit.commands import streamlit_logs - from snowflake.cli.api.identifiers import FQN - mock_conn = mock.Mock() mock_conn.database = "DB" mock_conn.schema = "SCHEMA" @@ -562,9 +545,6 @@ def test_name_flag_resolves_fqn_and_validates( @mock.patch("snowflake.cli._plugins.streamlit.commands.get_cli_context") def test_name_and_entity_id_raises(self, mock_get_ctx): """When both --name and entity_id are provided, raise an error.""" - from snowflake.cli._plugins.streamlit.commands import streamlit_logs - from snowflake.cli.api.identifiers import FQN - mock_ctx = mock.Mock() mock_ctx.connection = mock.Mock() mock_get_ctx.return_value = mock_ctx @@ -577,8 +557,6 @@ def test_name_and_entity_id_raises(self, mock_get_ctx): @mock.patch("snowflake.cli._plugins.streamlit.commands.get_cli_context") def test_no_name_no_project_definition_raises(self, mock_get_ctx): """When neither --name nor project definition is available, raise an error.""" - from snowflake.cli._plugins.streamlit.commands import streamlit_logs - mock_ctx = mock.Mock() mock_ctx.connection = mock.Mock() mock_ctx.project_definition = None @@ -589,17 +567,12 @@ def test_no_name_no_project_definition_raises(self, mock_get_ctx): @mock.patch("snowflake.cli._plugins.streamlit.commands.get_cli_context") @mock.patch("snowflake.cli._plugins.streamlit.commands.get_entity_for_operation") - @mock.patch( - "snowflake.cli._plugins.streamlit.log_streaming.validate_spcs_v2_runtime" - ) - @mock.patch("snowflake.cli._plugins.streamlit.log_streaming.stream_logs") + @mock.patch("snowflake.cli._plugins.streamlit.commands.validate_spcs_v2_runtime") + @mock.patch("snowflake.cli._plugins.streamlit.commands.stream_logs") def test_project_definition_path( self, mock_stream_logs, mock_validate, mock_get_entity, mock_get_ctx ): """When using project definition, resolve entity and validate via DESCRIBE.""" - from snowflake.cli._plugins.streamlit.commands import streamlit_logs - from snowflake.cli.api.identifiers import 
FQN - mock_conn = mock.Mock() mock_conn.database = "DB" mock_conn.schema = "PUBLIC" From 854393bc827c2e8d11d01fd67842e85302415948 Mon Sep 17 00:00:00 2001 From: Sandeep Kumta Vishnu Date: Sun, 22 Feb 2026 16:13:14 -0800 Subject: [PATCH 8/8] fix: update help message snapshots for streamlit logs command --- tests/__snapshots__/test_help_messages.ambr | 164 ++++++++++++++++++++ 1 file changed, 164 insertions(+) diff --git a/tests/__snapshots__/test_help_messages.ambr b/tests/__snapshots__/test_help_messages.ambr index 328b75547a..0fc00e732d 100644 --- a/tests/__snapshots__/test_help_messages.ambr +++ b/tests/__snapshots__/test_help_messages.ambr @@ -22164,6 +22164,168 @@ +------------------------------------------------------------------------------+ + ''' +# --- +# name: test_help_messages[streamlit.logs] + ''' + + Usage: root streamlit logs [OPTIONS] [ENTITY_ID] + + Streams live logs from a deployed Streamlit app to your terminal. + + Reads the Streamlit app name from the project definition file (snowflake.yml) + or from the --name option. Connects to the app's developer log service via + WebSocket and prints log entries in real time. Press Ctrl+C to stop streaming. + + Log streaming requires SPCSv2 runtime. + + +- Arguments ------------------------------------------------------------------+ + | entity_id [ENTITY_ID] ID of streamlit entity. | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --name TEXT Fully qualified name of the | + | Streamlit app (e.g. my_app, | + | schema.my_app, or | + | db.schema.my_app). Overrides | + | the project definition when | + | provided. | + | --tail -n INTEGER RANGE [0<=x<=1000] Number of historical log | + | lines to fetch. Use 0 for | + | live logs only. | + | [default: 100] | + | --project -p TEXT Path where the Snowflake | + | project is stored. Defaults | + | to the current working | + | directory. 
| + | --env TEXT String in the format | + | key=value. Overrides | + | variables from the env | + | section used for templates. | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value specified | + | for the connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. Overrides | + | the value specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value specified | + | for the connection. | + | --workload-identity-provider TEXT Workload identity provider | + | (AWS, AZURE, GCP, OIDC). | + | Overrides the value specified | + | for the connection | + | --private-key-file,--privat… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the connection. | + | --token TEXT OAuth token to use when | + | connecting to Snowflake. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value specified | + | for the connection. | + | --role,--rolename TEXT Role to use. 
Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command-line parameters, | + | instead of one defined in | + | config | + | --mfa-passcode TEXT Token to use for multi-factor | + | authentication (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + | --oauth-client-id TEXT Value of client id provided | + | by the Identity Provider for | + | Snowflake integration. | + | --oauth-client-secret TEXT Value of the client secret | + | provided by the Identity | + | Provider for Snowflake | + | integration. | + | --oauth-authorization-url TEXT Identity Provider endpoint | + | supplying the authorization | + | code to the driver. | + | --oauth-token-request-url TEXT Identity Provider endpoint | + | supplying the access tokens | + | to the driver. | + | --oauth-redirect-uri TEXT URI to use for authorization | + | code redirection. | + | --oauth-scope TEXT Scope requested in the | + | Identity Provider | + | authorization request. | + | --oauth-disable-pkce Disables Proof Key for Code | + | Exchange (PKCE). Default: | + | False. | + | --oauth-enable-refresh-toke… Enables a silent | + | re-authentication when the | + | actual access token becomes | + | outdated. Default: False. | + | --oauth-enable-single-use-r… Whether to opt-in to | + | single-use refresh token | + | semantics. Default: False. | + | --client-store-temporary-cr… Store the temporary | + | credential. 
| + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON|JSON_EXT| Specifies the output | + | CSV] format. | + | [default: TABLE] | + | --verbose -v Displays log entries | + | for log levels info | + | and higher. | + | --debug Displays log entries | + | for log levels debug | + | and higher; debug logs | + | contain additional | + | information. | + | --silent Turns off intermediate | + | output to console. | + | --enhanced-exit-codes Differentiate exit | + | error codes based on | + | failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EX… | + | --decimal-precision INTEGER Number of decimal | + | places to display for | + | decimal values. Uses | + | Python's default | + | precision if not | + | specified. [env var: | + | SNOWFLAKE_DECIMAL_PRE… | + +------------------------------------------------------------------------------+ + + ''' # --- # name: test_help_messages[streamlit.share] @@ -22331,6 +22493,7 @@ | execute Executes a streamlit in a headless mode. | | get-url Returns a URL to the specified Streamlit app | | list Lists all available streamlits. | + | logs Streams live logs from a deployed Streamlit app to your terminal. | | share Shares a Streamlit app with another role. | +------------------------------------------------------------------------------+ @@ -23111,6 +23274,7 @@ | execute Executes a streamlit in a headless mode. | | get-url Returns a URL to the specified Streamlit app | | list Lists all available streamlits. | + | logs Streams live logs from a deployed Streamlit app to your terminal. | | share Shares a Streamlit app with another role. | +------------------------------------------------------------------------------+