From 4eb213612b13ace6acc203050f9e58d4b269926d Mon Sep 17 00:00:00 2001 From: Jeong YunWon Date: Fri, 25 Apr 2025 16:47:20 +0900 Subject: [PATCH 01/11] revert a lot --- .cspell.dict/rust-more.txt | 2 - .github/workflows/ci.yaml | 2 +- Cargo.lock | 262 +++++++------ Cargo.toml | 9 +- Lib/colorsys.py | 2 +- Lib/graphlib.py | 2 +- Lib/linecache.py | 91 +---- Lib/pprint.py | 3 - Lib/queue.py | 60 +-- Lib/sched.py | 2 +- Lib/tarfile.py | 436 ++++----------------- Lib/test/test___all__.py | 54 ++- Lib/test/test_android.py | 448 --------------------- Lib/test/test_baseexception.py | 5 +- Lib/test/test_bz2.py | 6 - Lib/test/test_heapq.py | 1 + Lib/test/test_linecache.py | 80 ---- Lib/test/test_pprint.py | 298 +++++++++----- Lib/test/test_queue.py | 416 -------------------- Lib/test/test_sched.py | 23 +- Lib/test/test_shutil.py | 4 + Lib/test/test_wave.py | 227 ----------- Lib/test/test_webbrowser.py | 230 ++--------- Lib/wave.py | 663 -------------------------------- Lib/webbrowser.py | 357 ++++++++--------- README.md | 1 + common/Cargo.toml | 3 +- common/src/cmp.rs | 48 +++ common/src/lib.rs | 3 +- compiler/codegen/src/compile.rs | 120 +++--- compiler/codegen/src/error.rs | 8 - compiler/codegen/src/ir.rs | 3 - compiler/core/src/bytecode.rs | 7 +- scripts/fix_test.py | 137 ------- src/lib.rs | 2 - stdlib/Cargo.toml | 7 +- stdlib/src/bz2.rs | 151 +++++--- stdlib/src/lib.rs | 11 +- stdlib/src/mmap.rs | 18 +- stdlib/src/select.rs | 26 +- stdlib/src/zlib.rs | 347 +++++------------ vm/Cargo.toml | 6 +- vm/src/frame.rs | 8 + vm/src/function/number.rs | 2 +- vm/src/lib.rs | 1 + vm/src/stdlib/nt.rs | 21 - vm/src/stdlib/operator.rs | 8 +- vm/src/vm/mod.rs | 11 +- wasm/demo/package-lock.json | 6 +- 49 files changed, 1080 insertions(+), 3558 deletions(-) delete mode 100644 Lib/test/test_android.py delete mode 100644 Lib/test/test_wave.py delete mode 100644 Lib/wave.py create mode 100644 common/src/cmp.rs delete mode 100644 scripts/fix_test.py diff --git a/.cspell.dict/rust-more.txt b/.cspell.dict/rust-more.txt index e5d5ac86c4..e92f1ff8c1 100644 --- a/.cspell.dict/rust-more.txt +++ b/.cspell.dict/rust-more.txt @@ -74,8 +74,6 @@ unic unistd unraw unsync -wasip1 -wasip2 wasmbind wasmtime widestring diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4e51d48953..afd3201e28 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -16,7 +16,7 @@ concurrency: cancel-in-progress: true env: - CARGO_ARGS: --no-default-features --features stdlib,importlib,stdio,encodings,sqlite,ssl + CARGO_ARGS: --no-default-features --features stdlib,importlib,encodings,sqlite,ssl # Skip additional tests on Windows. They are checked on Linux and MacOS. 
# test_glob: many failing tests # test_io: many failing tests diff --git a/Cargo.lock b/Cargo.lock index 0255535c1b..9ffadb13f6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -57,12 +57,6 @@ dependencies = [ "libc", ] -[[package]] -name = "anes" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" - [[package]] name = "anstream" version = "0.6.18" @@ -149,6 +143,17 @@ dependencies = [ "bytemuck", ] +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + [[package]] name = "autocfg" version = "1.4.0" @@ -239,12 +244,12 @@ checksum = "b6b1fc10dbac614ebc03540c9dbd60e83887fda27794998c6528f1782047d540" [[package]] name = "bzip2" -version = "0.5.2" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47" +checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8" dependencies = [ "bzip2-sys", - "libbz2-rs-sys", + "libc", ] [[package]] @@ -325,33 +330,6 @@ dependencies = [ "windows-link", ] -[[package]] -name = "ciborium" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" -dependencies = [ - "ciborium-io", - "ciborium-ll", - "serde", -] - -[[package]] -name = "ciborium-io" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" - -[[package]] -name = "ciborium-ll" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" -dependencies = [ - "ciborium-io", - "half", -] - [[package]] name = "clang-sys" version = "1.8.1" @@ -365,29 +343,15 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.36" +version = "2.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2df961d8c8a0d08aa9945718ccf584145eee3f3aa06cddbeac12933781102e04" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ - "clap_builder", -] - -[[package]] -name = "clap_builder" -version = "4.5.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "132dbda40fb6753878316a489d5a1242a8ef2f0d9e47ba01c951ea8aa7d013a5" -dependencies = [ - "anstyle", - "clap_lex", + "bitflags 1.3.2", + "textwrap 0.11.0", + "unicode-width 0.1.14", ] -[[package]] -name = "clap_lex" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" - [[package]] name = "clipboard-win" version = "5.4.0" @@ -439,12 +403,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "constant_time_eq" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b" - [[package]] name = "core-foundation" version = "0.9.4" @@ -641,24 +599,24 @@ dependencies = [ [[package]] name = "criterion" -version = "0.5.1" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" dependencies = [ - "anes", + "atty", "cast", - "ciborium", "clap", "criterion-plot", - "is-terminal", + "csv", "itertools 0.10.5", + "lazy_static 1.5.0", "num-traits", - "once_cell", "oorandom", "plotters", "rayon", "regex", "serde", + "serde_cbor", "serde_derive", "serde_json", "tinytemplate", @@ -667,9 +625,9 @@ dependencies = [ [[package]] name = "criterion-plot" -version = "0.5.0" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" dependencies = [ "cast", "itertools 0.10.5", @@ -716,6 +674,18 @@ dependencies = [ "typenum", ] +[[package]] +name = "csv" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + [[package]] name = "csv-core" version = "0.1.12" @@ -824,12 +794,6 @@ dependencies = [ "regex", ] -[[package]] -name = "env_home" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" - [[package]] name = "env_logger" version = "0.11.8" @@ -884,7 +848,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78" dependencies = [ "cfg-if", - "rustix", + "rustix 1.0.5", "windows-sys 0.59.0", ] @@ -894,7 +858,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc2706461e1ee94f55cab2ed2e3d34ae9536cfa830358ef80acff1a3dacab30" dependencies = [ - "lazy_static", + "lazy_static 0.2.11", "serde", "serde_derive", "serde_json", @@ -931,7 +895,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" dependencies = [ "crc32fast", - "libz-rs-sys", + "libz-rs-sys 0.5.0", "miniz_oxide", ] @@ -978,7 +942,7 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed7131e57abbde63513e0e6636f76668a1ca9798dcae2df4e283cae9ee83859e" dependencies = [ - "rustix", + "rustix 1.0.5", "windows-targets 0.52.6", ] @@ -1035,6 +999,12 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +[[package]] +name = "half" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" + [[package]] name = "half" version = "2.5.0" @@ -1062,15 +1032,18 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" -version = "0.3.9" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] [[package]] name = "hermit-abi" -version = "0.5.0" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -1158,17 +1131,6 @@ dependencies = [ "syn 2.0.100", ] -[[package]] -name = "is-terminal" -version = "0.4.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" -dependencies = [ - "hermit-abi 0.5.0", - "libc", - "windows-sys 0.59.0", -] - [[package]] name = "is_terminal_polyfill" version = "1.70.1" @@ -1286,6 +1248,12 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + [[package]] name = "lexical-parse-float" version = "1.0.5" @@ -1322,12 +1290,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa0e2a1fcbe2f6be6c42e342259976206b383122fc152e872795338b5a3f3a7" -[[package]] -name = "libbz2-rs-sys" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0864a00c8d019e36216b69c2c4ce50b83b7bd966add3cf5ba554ec44f8bebcf5" - [[package]] name = "libc" version = "0.2.171" @@ -1390,13 +1352,22 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "libz-rs-sys" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "902bc563b5d65ad9bba616b490842ef0651066a1a1dc3ce1087113ffcb873c8d" +dependencies = [ + "zlib-rs 0.4.2", +] + [[package]] name = "libz-rs-sys" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6489ca9bd760fe9642d7644e827b0c9add07df89857b0416ee15c1cc1a3b8c5a" dependencies = [ - "zlib-rs", + "zlib-rs 0.5.0", ] [[package]] @@ -1405,6 +1376,12 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + [[package]] name = "linux-raw-sys" version = "0.9.3" @@ -2009,12 +1986,9 @@ checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" [[package]] name = "radium" -version = "1.1.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db0b76288902db304c864a12046b73d2d895cc34a4bb8137baaeebe9978a072c" -dependencies = [ - "cfg-if", -] +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" [[package]] name = "radix_trie" @@ -2275,6 +2249,19 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags 2.9.0", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys 0.59.0", +] + [[package]] name = "rustix" version = "1.0.5" @@ -2284,7 +2271,7 @@ dependencies = [ "bitflags 2.9.0", "errno", "libc", - "linux-raw-sys", + "linux-raw-sys 0.9.3", 
"windows-sys 0.59.0", ] @@ -2360,6 +2347,7 @@ dependencies = [ "rustpython-wtf8", "siphasher", "unicode_names2", + "volatile", "widestring", "windows-sys 0.59.0", ] @@ -2423,7 +2411,7 @@ dependencies = [ "rustpython-doc", "syn 2.0.100", "syn-ext", - "textwrap", + "textwrap 0.16.2", ] [[package]] @@ -2508,7 +2496,7 @@ dependencies = [ "junction", "libc", "libsqlite3-sys", - "libz-rs-sys", + "libz-rs-sys 0.4.2", "mac_address", "malachite-bigint", "md-5", @@ -2528,7 +2516,7 @@ dependencies = [ "paste", "puruspe", "rand_core 0.9.3", - "rustix", + "rustix 0.38.44", "rustpython-common", "rustpython-derive", "rustpython-vm", @@ -2567,7 +2555,6 @@ dependencies = [ "caseless", "cfg-if", "chrono", - "constant_time_eq", "crossbeam-utils", "errno", "exitcode", @@ -2575,7 +2562,7 @@ dependencies = [ "flamer", "getrandom 0.3.2", "glob", - "half", + "half 2.5.0", "hex", "indexmap", "is-macro", @@ -2603,7 +2590,7 @@ dependencies = [ "ruff_python_parser", "ruff_source_file", "ruff_text_size", - "rustix", + "rustix 0.38.44", "rustpython-codegen", "rustpython-common", "rustpython-compiler", @@ -2744,6 +2731,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "serde_cbor" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" +dependencies = [ + "half 1.8.3", + "serde", +] + [[package]] name = "serde_derive" version = "1.0.219" @@ -2905,20 +2902,20 @@ dependencies = [ [[package]] name = "system-configuration" -version = "0.6.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ - "bitflags 2.9.0", + "bitflags 1.3.2", "core-foundation", "system-configuration-sys", ] [[package]] name = "system-configuration-sys" -version = "0.6.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" dependencies = [ "core-foundation-sys", "libc", @@ -2948,6 +2945,15 @@ dependencies = [ "libc", ] +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width 0.1.14", +] + [[package]] name = "textwrap" version = "0.16.2" @@ -3291,6 +3297,12 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "volatile" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8e76fae08f03f96e166d2dfda232190638c10e0383841252416f9cfe2ae60e6" + [[package]] name = "walkdir" version = "2.5.0" @@ -3411,13 +3423,13 @@ dependencies = [ [[package]] name = "which" -version = "7.0.3" +version = "6.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" +checksum = "b4ee928febd44d98f2f459a4a79bd4d928591333a494a10a868418ac1b39cf1f" dependencies = [ "either", - "env_home", - "rustix", + "home", + "rustix 0.38.44", "winsafe", ] @@ -3755,6 +3767,12 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "zlib-rs" +version = 
"0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b20717f0917c908dc63de2e44e97f1e6b126ca58d0e391cee86d504eb8fbd05" + [[package]] name = "zlib-rs" version = "0.5.0" diff --git a/Cargo.toml b/Cargo.toml index fb066e5dfd..74fdf7464e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,15 +10,15 @@ repository.workspace = true license.workspace = true [features] -default = ["threading", "stdlib", "stdio", "importlib"] +default = ["threading", "stdlib", "importlib"] importlib = ["rustpython-vm/importlib"] encodings = ["rustpython-vm/encodings"] -stdio = ["rustpython-vm/stdio"] stdlib = ["rustpython-stdlib", "rustpython-pylib", "encodings"] flame-it = ["rustpython-vm/flame-it", "flame", "flamescope"] freeze-stdlib = ["stdlib", "rustpython-vm/freeze-stdlib", "rustpython-pylib?/freeze-stdlib"] jit = ["rustpython-vm/jit"] threading = ["rustpython-vm/threading", "rustpython-stdlib/threading"] +bz2 = ["stdlib", "rustpython-stdlib/bz2"] sqlite = ["rustpython-stdlib/sqlite"] ssl = ["rustpython-stdlib/ssl"] ssl-vendor = ["ssl", "rustpython-stdlib/ssl-vendor"] @@ -155,8 +155,7 @@ bitflags = "2.4.2" bstr = "1" cfg-if = "1.0" chrono = "0.4.39" -constant_time_eq = "0.4" -criterion = { version = "0.5", features = ["html_reports"] } +criterion = { version = "0.3.5", features = ["html_reports"] } crossbeam-utils = "0.8.21" flame = "0.2.2" getrandom = { version = "0.3", features = ["std"] } @@ -187,7 +186,7 @@ proc-macro2 = "1.0.93" quote = "1.0.38" rand = "0.9" rand_core = { version = "0.9", features = ["os_rng"] } -rustix = { version = "1.0", features = ["event"] } +rustix = { version = "0.38", features = ["event"] } rustyline = "15.0.0" serde = { version = "1.0.133", default-features = false } schannel = "0.1.27" diff --git a/Lib/colorsys.py b/Lib/colorsys.py index e97f91718a..bc897bd0f9 100644 --- a/Lib/colorsys.py +++ b/Lib/colorsys.py @@ -24,7 +24,7 @@ __all__ = ["rgb_to_yiq","yiq_to_rgb","rgb_to_hls","hls_to_rgb", "rgb_to_hsv","hsv_to_rgb"] -# Some floating-point constants +# Some floating point constants ONE_THIRD = 1.0/3.0 ONE_SIXTH = 1.0/6.0 diff --git a/Lib/graphlib.py b/Lib/graphlib.py index 9512865a8e..636545648e 100644 --- a/Lib/graphlib.py +++ b/Lib/graphlib.py @@ -154,7 +154,7 @@ def done(self, *nodes): This method unblocks any successor of each node in *nodes* for being returned in the future by a call to "get_ready". - Raises ValueError if any node in *nodes* has already been marked as + Raises :exec:`ValueError` if any node in *nodes* has already been marked as processed by a previous call to this method, if a node was not added to the graph by using "add" or if called without calling "prepare" previously or if node has not yet been returned by "get_ready". diff --git a/Lib/linecache.py b/Lib/linecache.py index dc02de19eb..97644a8e37 100644 --- a/Lib/linecache.py +++ b/Lib/linecache.py @@ -5,13 +5,17 @@ that name. """ +import functools +import sys +import os +import tokenize + __all__ = ["getline", "clearcache", "checkcache", "lazycache"] # The cache. Maps filenames to either a thunk which will provide source code, # or a tuple (size, mtime, lines, fullname) once loaded. 
cache = {} -_interactive_cache = {} def clearcache(): @@ -45,54 +49,28 @@ def getlines(filename, module_globals=None): return [] -def _getline_from_code(filename, lineno): - lines = _getlines_from_code(filename) - if 1 <= lineno <= len(lines): - return lines[lineno - 1] - return '' - -def _make_key(code): - return (code.co_filename, code.co_qualname, code.co_firstlineno) - -def _getlines_from_code(code): - code_id = _make_key(code) - if code_id in _interactive_cache: - entry = _interactive_cache[code_id] - if len(entry) != 1: - return _interactive_cache[code_id][2] - return [] - - def checkcache(filename=None): """Discard cache entries that are out of date. (This is not checked upon each call!)""" if filename is None: - # get keys atomically - filenames = cache.copy().keys() - else: + filenames = list(cache.keys()) + elif filename in cache: filenames = [filename] + else: + return for filename in filenames: - try: - entry = cache[filename] - except KeyError: - continue - + entry = cache[filename] if len(entry) == 1: # lazy cache entry, leave it lazy. continue size, mtime, lines, fullname = entry if mtime is None: continue # no-op for files loaded via a __loader__ - try: - # This import can fail if the interpreter is shutting down - import os - except ImportError: - return try: stat = os.stat(fullname) - except (OSError, ValueError): + except OSError: cache.pop(filename, None) continue if size != stat.st_size or mtime != stat.st_mtime: @@ -104,17 +82,6 @@ def updatecache(filename, module_globals=None): If something's wrong, print a message, discard the cache entry, and return an empty list.""" - # These imports are not at top level because linecache is in the critical - # path of the interpreter startup and importing os and sys take a lot of time - # and slows down the startup sequence. 
- try: - import os - import sys - import tokenize - except ImportError: - # These import can fail if the interpreter is shutting down - return [] - if filename in cache: if len(cache[filename]) != 1: cache.pop(filename, None) @@ -161,20 +128,16 @@ def updatecache(filename, module_globals=None): try: stat = os.stat(fullname) break - except (OSError, ValueError): + except OSError: pass else: return [] - except ValueError: # may be raised by os.stat() - return [] try: with tokenize.open(fullname) as fp: lines = fp.readlines() except (OSError, UnicodeDecodeError, SyntaxError): return [] - if not lines: - lines = ['\n'] - elif not lines[-1].endswith('\n'): + if lines and not lines[-1].endswith('\n'): lines[-1] += '\n' size, mtime = stat.st_size, stat.st_mtime cache[filename] = size, mtime, lines, fullname @@ -203,29 +166,17 @@ def lazycache(filename, module_globals): return False # Try for a __loader__, if available if module_globals and '__name__' in module_globals: - spec = module_globals.get('__spec__') - name = getattr(spec, 'name', None) or module_globals['__name__'] - loader = getattr(spec, 'loader', None) - if loader is None: - loader = module_globals.get('__loader__') + name = module_globals['__name__'] + if (loader := module_globals.get('__loader__')) is None: + if spec := module_globals.get('__spec__'): + try: + loader = spec.loader + except AttributeError: + pass get_source = getattr(loader, 'get_source', None) if name and get_source: - def get_lines(name=name, *args, **kwargs): - return get_source(name, *args, **kwargs) + get_lines = functools.partial(get_source, name) cache[filename] = (get_lines,) return True return False - -def _register_code(code, string, name): - entry = (len(string), - None, - [line + '\n' for line in string.splitlines()], - name) - stack = [code] - while stack: - code = stack.pop() - for const in code.co_consts: - if isinstance(const, type(code)): - stack.append(const) - _interactive_cache[_make_key(code)] = entry diff --git a/Lib/pprint.py b/Lib/pprint.py index 9314701db3..34ed12637e 100644 --- a/Lib/pprint.py +++ b/Lib/pprint.py @@ -128,9 +128,6 @@ def __init__(self, indent=1, width=80, depth=None, stream=None, *, sort_dicts If true, dict keys are sorted. - underscore_numbers - If true, digit groups are separated with underscores. - """ indent = int(indent) width = int(width) diff --git a/Lib/queue.py b/Lib/queue.py index 25beb46e30..55f5008846 100644 --- a/Lib/queue.py +++ b/Lib/queue.py @@ -10,15 +10,7 @@ except ImportError: SimpleQueue = None -__all__ = [ - 'Empty', - 'Full', - 'ShutDown', - 'Queue', - 'PriorityQueue', - 'LifoQueue', - 'SimpleQueue', -] +__all__ = ['Empty', 'Full', 'Queue', 'PriorityQueue', 'LifoQueue', 'SimpleQueue'] try: @@ -33,10 +25,6 @@ class Full(Exception): pass -class ShutDown(Exception): - '''Raised when put/get with shut-down queue.''' - - class Queue: '''Create a queue object with a given maximum size. @@ -66,9 +54,6 @@ def __init__(self, maxsize=0): self.all_tasks_done = threading.Condition(self.mutex) self.unfinished_tasks = 0 - # Queue shutdown state - self.is_shutdown = False - def task_done(self): '''Indicate that a formerly enqueued task is complete. @@ -80,9 +65,6 @@ def task_done(self): have been processed (meaning that a task_done() call was received for every item that had been put() into the queue). - shutdown(immediate=True) calls task_done() for each remaining item in - the queue. - Raises a ValueError if called more times than there were items placed in the queue. 
''' @@ -147,12 +129,8 @@ def put(self, item, block=True, timeout=None): Otherwise ('block' is false), put an item on the queue if a free slot is immediately available, else raise the Full exception ('timeout' is ignored in that case). - - Raises ShutDown if the queue has been shut down. ''' with self.not_full: - if self.is_shutdown: - raise ShutDown if self.maxsize > 0: if not block: if self._qsize() >= self.maxsize: @@ -160,8 +138,6 @@ def put(self, item, block=True, timeout=None): elif timeout is None: while self._qsize() >= self.maxsize: self.not_full.wait() - if self.is_shutdown: - raise ShutDown elif timeout < 0: raise ValueError("'timeout' must be a non-negative number") else: @@ -171,8 +147,6 @@ def put(self, item, block=True, timeout=None): if remaining <= 0.0: raise Full self.not_full.wait(remaining) - if self.is_shutdown: - raise ShutDown self._put(item) self.unfinished_tasks += 1 self.not_empty.notify() @@ -187,21 +161,14 @@ def get(self, block=True, timeout=None): Otherwise ('block' is false), return an item if one is immediately available, else raise the Empty exception ('timeout' is ignored in that case). - - Raises ShutDown if the queue has been shut down and is empty, - or if the queue has been shut down immediately. ''' with self.not_empty: - if self.is_shutdown and not self._qsize(): - raise ShutDown if not block: if not self._qsize(): raise Empty elif timeout is None: while not self._qsize(): self.not_empty.wait() - if self.is_shutdown and not self._qsize(): - raise ShutDown elif timeout < 0: raise ValueError("'timeout' must be a non-negative number") else: @@ -211,8 +178,6 @@ def get(self, block=True, timeout=None): if remaining <= 0.0: raise Empty self.not_empty.wait(remaining) - if self.is_shutdown and not self._qsize(): - raise ShutDown item = self._get() self.not_full.notify() return item @@ -233,29 +198,6 @@ def get_nowait(self): ''' return self.get(block=False) - def shutdown(self, immediate=False): - '''Shut-down the queue, making queue gets and puts raise ShutDown. - - By default, gets will only raise once the queue is empty. Set - 'immediate' to True to make gets raise immediately instead. - - All blocked callers of put() and get() will be unblocked. If - 'immediate', a task is marked as done for each item remaining in - the queue, which may unblock callers of join(). - ''' - with self.mutex: - self.is_shutdown = True - if immediate: - while self._qsize(): - self._get() - if self.unfinished_tasks > 0: - self.unfinished_tasks -= 1 - # release all blocked threads in `join()` - self.all_tasks_done.notify_all() - # All getters need to re-check queue-empty to raise ShutDown - self.not_empty.notify_all() - self.not_full.notify_all() - # Override these methods to implement other queue organizations # (e.g. stack or priority queue). # These will only be called with appropriate locks held diff --git a/Lib/sched.py b/Lib/sched.py index fb20639d45..14613cf298 100644 --- a/Lib/sched.py +++ b/Lib/sched.py @@ -11,7 +11,7 @@ implement simulated time by writing your own functions. This can also be used to integrate scheduling with STDWIN events; the delay function is allowed to modify the queue. Time can be expressed as -integers or floating-point numbers, as long as it is consistent. +integers or floating point numbers, as long as it is consistent. Events are specified by tuples (time, priority, action, argument, kwargs). 
As in UNIX, lower priority numbers mean higher priority; in this diff --git a/Lib/tarfile.py b/Lib/tarfile.py index 3bbbcaa621..dea150e8db 100755 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -46,7 +46,6 @@ import struct import copy import re -import warnings try: import pwd @@ -58,19 +57,19 @@ grp = None # os.symlink on Windows prior to 6.0 raises NotImplementedError -# OSError (winerror=1314) will be raised if the caller does not hold the -# SeCreateSymbolicLinkPrivilege privilege -symlink_exception = (AttributeError, NotImplementedError, OSError) +symlink_exception = (AttributeError, NotImplementedError) +try: + # OSError (winerror=1314) will be raised if the caller does not hold the + # SeCreateSymbolicLinkPrivilege privilege + symlink_exception += (OSError,) +except NameError: + pass # from tarfile import * __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError", "CompressionError", "StreamError", "ExtractError", "HeaderError", "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT", - "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter", - "tar_filter", "FilterError", "AbsoluteLinkError", - "OutsideDestinationError", "SpecialFileError", "AbsolutePathError", - "LinkOutsideDestinationError"] - + "DEFAULT_FORMAT", "open"] #--------------------------------------------------------- # tar constants @@ -159,8 +158,6 @@ def stn(s, length, encoding, errors): """Convert a string to a null-terminated bytes object. """ - if s is None: - raise ValueError("metadata cannot contain None") s = s.encode(encoding, errors) return s[:length] + (length - len(s)) * NUL @@ -331,17 +328,15 @@ def write(self, s): class _Stream: """Class that serves as an adapter between TarFile and a stream-like object. The stream-like object only - needs to have a read() or write() method that works with bytes, - and the method is accessed blockwise. - Use of gzip or bzip2 compression is possible. - A stream-like object could be for example: sys.stdin.buffer, - sys.stdout.buffer, a socket, a tape device etc. + needs to have a read() or write() method and is accessed + blockwise. Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin, + sys.stdout, a socket, a tape device etc. _Stream is intended to be used only internally. """ - def __init__(self, name, mode, comptype, fileobj, bufsize, - compresslevel): + def __init__(self, name, mode, comptype, fileobj, bufsize): """Construct a _Stream object. """ self._extfileobj = True @@ -373,10 +368,10 @@ def __init__(self, name, mode, comptype, fileobj, bufsize, self.zlib = zlib self.crc = zlib.crc32(b"") if mode == "r": - self.exception = zlib.error self._init_read_gz() + self.exception = zlib.error else: - self._init_write_gz(compresslevel) + self._init_write_gz() elif comptype == "bz2": try: @@ -388,7 +383,7 @@ def __init__(self, name, mode, comptype, fileobj, bufsize, self.cmp = bz2.BZ2Decompressor() self.exception = OSError else: - self.cmp = bz2.BZ2Compressor(compresslevel) + self.cmp = bz2.BZ2Compressor() elif comptype == "xz": try: @@ -415,14 +410,13 @@ def __del__(self): if hasattr(self, "closed") and not self.closed: self.close() - def _init_write_gz(self, compresslevel): + def _init_write_gz(self): """Initialize for writing with gzip compression. 
""" - self.cmp = self.zlib.compressobj(compresslevel, - self.zlib.DEFLATED, - -self.zlib.MAX_WBITS, - self.zlib.DEF_MEM_LEVEL, - 0) + self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED, + -self.zlib.MAX_WBITS, + self.zlib.DEF_MEM_LEVEL, + 0) timestamp = struct.pack("" % (self.__class__.__name__,self.name,id(self)) - def replace(self, *, - name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP, - uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP, - deep=True, _KEEP=_KEEP): - """Return a deep copy of self with the given attributes replaced. - """ - if deep: - result = copy.deepcopy(self) - else: - result = copy.copy(self) - if name is not _KEEP: - result.name = name - if mtime is not _KEEP: - result.mtime = mtime - if mode is not _KEEP: - result.mode = mode - if linkname is not _KEEP: - result.linkname = linkname - if uid is not _KEEP: - result.uid = uid - if gid is not _KEEP: - result.gid = gid - if uname is not _KEEP: - result.uname = uname - if gname is not _KEEP: - result.gname = gname - return result - def get_info(self): """Return the TarInfo's attributes as a dictionary. """ - if self.mode is None: - mode = None - else: - mode = self.mode & 0o7777 info = { "name": self.name, - "mode": mode, + "mode": self.mode & 0o7777, "uid": self.uid, "gid": self.gid, "size": self.size, @@ -983,9 +820,6 @@ def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescap """Return a tar header as a string of 512 byte blocks. """ info = self.get_info() - for name, value in info.items(): - if value is None: - raise ValueError("%s may not be None" % name) if format == USTAR_FORMAT: return self.create_ustar_header(info, encoding, errors) @@ -1116,12 +950,6 @@ def _create_header(info, format, encoding, errors): devmajor = stn("", 8, encoding, errors) devminor = stn("", 8, encoding, errors) - # None values in metadata should cause ValueError. - # itn()/stn() do this for all fields except type. - filetype = info.get("type", REGTYPE) - if filetype is None: - raise ValueError("TarInfo.type must not be None") - parts = [ stn(info.get("name", ""), 100, encoding, errors), itn(info.get("mode", 0) & 0o7777, 8, format), @@ -1130,7 +958,7 @@ def _create_header(info, format, encoding, errors): itn(info.get("size", 0), 12, format), itn(info.get("mtime", 0), 12, format), b" ", # checksum field - filetype, + info.get("type", REGTYPE), stn(info.get("linkname", ""), 100, encoding, errors), info.get("magic", POSIX_MAGIC), stn(info.get("uname", ""), 32, encoding, errors), @@ -1436,7 +1264,11 @@ def _proc_pax(self, tarfile): # the newline. keyword and value are both UTF-8 encoded strings. regex = re.compile(br"(\d+) ([^=]+)=") pos = 0 - while match := regex.match(buf, pos): + while True: + match = regex.match(buf, pos) + if not match: + break + length, keyword = match.groups() length = int(length) if length == 0: @@ -1636,8 +1468,6 @@ class TarFile(object): fileobject = ExFileObject # The file-object for extractfile(). - extraction_filter = None # The default filter for extraction. 
- def __init__(self, name=None, mode="r", fileobj=None, format=None, tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, errors="surrogateescape", pax_headers=None, debug=None, @@ -1829,9 +1659,7 @@ def not_compressed(comptype): if filemode not in ("r", "w"): raise ValueError("mode must be 'r' or 'w'") - compresslevel = kwargs.pop("compresslevel", 9) - stream = _Stream(name, filemode, comptype, fileobj, bufsize, - compresslevel) + stream = _Stream(name, filemode, comptype, fileobj, bufsize) try: t = cls(name, filemode, stream, **kwargs) except: @@ -2112,10 +1940,7 @@ def list(self, verbose=True, *, members=None): members = self for tarinfo in members: if verbose: - if tarinfo.mode is None: - _safe_print("??????????") - else: - _safe_print(stat.filemode(tarinfo.mode)) + _safe_print(stat.filemode(tarinfo.mode)) _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid, tarinfo.gname or tarinfo.gid)) if tarinfo.ischr() or tarinfo.isblk(): @@ -2123,11 +1948,8 @@ def list(self, verbose=True, *, members=None): ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor))) else: _safe_print("%10d" % tarinfo.size) - if tarinfo.mtime is None: - _safe_print("????-??-?? ??:??:??") - else: - _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ - % time.localtime(tarinfo.mtime)[:6]) + _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6]) _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else "")) @@ -2214,63 +2036,32 @@ def addfile(self, tarinfo, fileobj=None): self.members.append(tarinfo) - def _get_filter_function(self, filter): - if filter is None: - filter = self.extraction_filter - if filter is None: - warnings.warn( - 'Python 3.14 will, by default, filter extracted tar ' - + 'archives and reject files or modify their metadata. ' - + 'Use the filter argument to control this behavior.', - DeprecationWarning) - return fully_trusted_filter - if isinstance(filter, str): - raise TypeError( - 'String names are not supported for ' - + 'TarFile.extraction_filter. Use a function such as ' - + 'tarfile.data_filter directly.') - return filter - if callable(filter): - return filter - try: - return _NAMED_FILTERS[filter] - except KeyError: - raise ValueError(f"filter {filter!r} not found") from None - - def extractall(self, path=".", members=None, *, numeric_owner=False, - filter=None): + def extractall(self, path=".", members=None, *, numeric_owner=False): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). If `numeric_owner` is True, only the numbers for user/group names are used and not the names. - - The `filter` function will be called on each member just - before extraction. - It can return a changed TarInfo or None to skip the member. - String names of common filters are accepted. """ directories = [] - filter_function = self._get_filter_function(filter) if members is None: members = self - for member in members: - tarinfo = self._get_extract_tarinfo(member, filter_function, path) - if tarinfo is None: - continue + for tarinfo in members: if tarinfo.isdir(): - # For directories, delay setting attributes until later, - # since permissions can interfere with extraction and - # extracting contents can reset mtime. + # Extract directories with a safe mode. 
directories.append(tarinfo) - self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(), - numeric_owner=numeric_owner) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 0o700 + # Do not set_attrs directories, as we will do that further down + self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(), + numeric_owner=numeric_owner) # Reverse sort directories. - directories.sort(key=lambda a: a.name, reverse=True) + directories.sort(key=lambda a: a.name) + directories.reverse() # Set correct owner, mtime and filemode on directories. for tarinfo in directories: @@ -2280,10 +2071,12 @@ def extractall(self, path=".", members=None, *, numeric_owner=False, self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError as e: - self._handle_nonfatal_error(e) + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) - def extract(self, member, path="", set_attrs=True, *, numeric_owner=False, - filter=None): + def extract(self, member, path="", set_attrs=True, *, numeric_owner=False): """Extract a member from the archive to the current working directory, using its full name. Its file information is extracted as accurately as possible. `member' may be a filename or a TarInfo object. You can @@ -2291,70 +2084,35 @@ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False, mtime, mode) are set unless `set_attrs' is False. If `numeric_owner` is True, only the numbers for user/group names are used and not the names. - - The `filter` function will be called before extraction. - It can return a changed TarInfo or None to skip the member. - String names of common filters are accepted. """ - filter_function = self._get_filter_function(filter) - tarinfo = self._get_extract_tarinfo(member, filter_function, path) - if tarinfo is not None: - self._extract_one(tarinfo, path, set_attrs, numeric_owner) + self._check("r") - def _get_extract_tarinfo(self, member, filter_function, path): - """Get filtered TarInfo (or None) from member, which might be a str""" if isinstance(member, str): tarinfo = self.getmember(member) else: tarinfo = member - unfiltered = tarinfo - try: - tarinfo = filter_function(tarinfo, path) - except (OSError, FilterError) as e: - self._handle_fatal_error(e) - except ExtractError as e: - self._handle_nonfatal_error(e) - if tarinfo is None: - self._dbg(2, "tarfile: Excluded %r" % unfiltered.name) - return None # Prepare the link target for makelink(). 
if tarinfo.islnk(): - tarinfo = copy.copy(tarinfo) tarinfo._link_target = os.path.join(path, tarinfo.linkname) - return tarinfo - - def _extract_one(self, tarinfo, path, set_attrs, numeric_owner): - """Extract from filtered tarinfo to disk""" - self._check("r") try: self._extract_member(tarinfo, os.path.join(path, tarinfo.name), set_attrs=set_attrs, numeric_owner=numeric_owner) except OSError as e: - self._handle_fatal_error(e) + if self.errorlevel > 0: + raise + else: + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) except ExtractError as e: - self._handle_nonfatal_error(e) - - def _handle_nonfatal_error(self, e): - """Handle non-fatal error (ExtractError) according to errorlevel""" - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - def _handle_fatal_error(self, e): - """Handle "fatal" error according to self.errorlevel""" - if self.errorlevel > 0: - raise - elif isinstance(e, OSError): - if e.filename is None: - self._dbg(1, "tarfile: %s" % e.strerror) + if self.errorlevel > 1: + raise else: - self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) - else: - self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e)) + self._dbg(1, "tarfile: %s" % e) def extractfile(self, member): """Extract a member from the archive as a file object. `member' may be @@ -2441,16 +2199,11 @@ def makedir(self, tarinfo, targetpath): """Make a directory called targetpath. """ try: - if tarinfo.mode is None: - # Use the system's default mode - os.mkdir(targetpath) - else: - # Use a safe mode for the directory, the real mode is set - # later in _extract_member(). - os.mkdir(targetpath, 0o700) + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) except FileExistsError: - if not os.path.isdir(targetpath): - raise + pass def makefile(self, tarinfo, targetpath): """Make a file called targetpath. @@ -2491,9 +2244,6 @@ def makedev(self, tarinfo, targetpath): raise ExtractError("special devices not supported by system") mode = tarinfo.mode - if mode is None: - # Use mknod's default - mode = 0o600 if tarinfo.isblk(): mode |= stat.S_IFBLK else: @@ -2515,6 +2265,7 @@ def makelink(self, tarinfo, targetpath): os.unlink(targetpath) os.symlink(tarinfo.linkname, targetpath) else: + # See extract(). if os.path.exists(tarinfo._link_target): os.link(tarinfo._link_target, targetpath) else: @@ -2539,19 +2290,15 @@ def chown(self, tarinfo, targetpath, numeric_owner): u = tarinfo.uid if not numeric_owner: try: - if grp and tarinfo.gname: + if grp: g = grp.getgrnam(tarinfo.gname)[2] except KeyError: pass try: - if pwd and tarinfo.uname: + if pwd: u = pwd.getpwnam(tarinfo.uname)[2] except KeyError: pass - if g is None: - g = -1 - if u is None: - u = -1 try: if tarinfo.issym() and hasattr(os, "lchown"): os.lchown(targetpath, u, g) @@ -2563,8 +2310,6 @@ def chown(self, tarinfo, targetpath, numeric_owner): def chmod(self, tarinfo, targetpath): """Set file permissions of targetpath according to tarinfo. """ - if tarinfo.mode is None: - return try: os.chmod(targetpath, tarinfo.mode) except OSError as e: @@ -2573,13 +2318,10 @@ def chmod(self, tarinfo, targetpath): def utime(self, tarinfo, targetpath): """Set modification time of targetpath according to tarinfo. 
""" - mtime = tarinfo.mtime - if mtime is None: - return if not hasattr(os, 'utime'): return try: - os.utime(targetpath, (mtime, mtime)) + os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) except OSError as e: raise ExtractError("could not change modification time") from e @@ -2597,8 +2339,6 @@ def next(self): # Advance the file pointer. if self.offset != self.fileobj.tell(): - if self.offset == 0: - return None self.fileobj.seek(self.offset - 1) if not self.fileobj.read(1): raise ReadError("unexpected end of data") @@ -2657,26 +2397,13 @@ def _getmember(self, name, tarinfo=None, normalize=False): members = self.getmembers() # Limit the member search list up to tarinfo. - skipping = False if tarinfo is not None: - try: - index = members.index(tarinfo) - except ValueError: - # The given starting point might be a (modified) copy. - # We'll later skip members until we find an equivalent. - skipping = True - else: - # Happy fast path - members = members[:index] + members = members[:members.index(tarinfo)] if normalize: name = os.path.normpath(name) for member in reversed(members): - if skipping: - if tarinfo.offset == member.offset: - skipping = False - continue if normalize: member_name = os.path.normpath(member.name) else: @@ -2685,16 +2412,14 @@ def _getmember(self, name, tarinfo=None, normalize=False): if name == member_name: return member - if skipping: - # Starting point was not found - raise ValueError(tarinfo) - def _load(self): """Read through the entire archive file and look for readable members. """ - while self.next() is not None: - pass + while True: + tarinfo = self.next() + if tarinfo is None: + break self._loaded = True def _check(self, mode=None): @@ -2779,7 +2504,6 @@ def __exit__(self, type, value, traceback): #-------------------- # exported functions #-------------------- - def is_tarfile(name): """Return True if name points to a tar archive that we are able to handle, else return False. 
@@ -2788,9 +2512,7 @@
     """
     try:
         if hasattr(name, "read"):
-            pos = name.tell()
             t = open(fileobj=name)
-            name.seek(pos)
         else:
             t = open(name)
         t.close()
@@ -2808,10 +2530,6 @@ def main():
     parser = argparse.ArgumentParser(description=description)
     parser.add_argument('-v', '--verbose', action='store_true', default=False,
                         help='Verbose output')
-    parser.add_argument('--filter', metavar='<filtername>',
-                        choices=_NAMED_FILTERS,
-                        help='Filter for extraction')
-
     group = parser.add_mutually_exclusive_group(required=True)
     group.add_argument('-l', '--list', metavar='<tarfile>',
                        help='Show listing of a tarfile')
@@ -2823,12 +2541,8 @@
     group.add_argument('-c', '--create', nargs='+', metavar='',
                        help='Create tarfile from sources')
     group.add_argument('-t', '--test', metavar='<tarfile>',
                        help='Test if a tarfile is valid')
     args = parser.parse_args()
-
-    if args.filter and args.extract is None:
-        parser.exit(1, '--filter is only valid for extraction\n')
-
     if args.test is not None:
         src = args.test
         if is_tarfile(src):
@@ -2859,7 +2573,7 @@
         if is_tarfile(src):
             with TarFile.open(src, 'r:*') as tf:
-                tf.extractall(path=curdir, filter=args.filter)
+                tf.extractall(path=curdir)
             if args.verbose:
                 if curdir == '.':
                     msg = '{!r} file is extracted.'.format(src)
diff --git a/Lib/test/test___all__.py b/Lib/test/test___all__.py
index 7b5356ea02..a620dd5b4c 100644
--- a/Lib/test/test___all__.py
+++ b/Lib/test/test___all__.py
@@ -5,21 +5,17 @@
 import sys
 import types
 
+try:
+    import _multiprocessing
+except ModuleNotFoundError:
+    _multiprocessing = None
+
 
 if support.check_sanitizer(address=True, memory=True):
-    SKIP_MODULES = frozenset((
-        # gh-90791: Tests involving libX11 can SEGFAULT on ASAN/MSAN builds.
-        # Skip modules, packages and tests using '_tkinter'.
-        '_tkinter',
-        'tkinter',
-        'test_tkinter',
-        'test_ttk',
-        'test_ttk_textonly',
-        'idlelib',
-        'test_idle',
-    ))
-else:
-    SKIP_MODULES = ()
+    # bpo-46633: test___all__ is skipped because importing some modules
+    # directly can trigger known problems with ASAN (like tk or crypt).
+    raise unittest.SkipTest("workaround ASAN build issues on loading tests "
+                            "like tk or crypt")
 
 
 class NoAll(RuntimeError):
     pass
 
 
 class FailedImport(RuntimeError):
     pass
 
 
 class AllTest(unittest.TestCase):
 
+    def setUp(self):
+        # concurrent.futures uses a __getattr__ hook. Its __all__ triggers
+        # import of a submodule, which fails when _multiprocessing is not
+        # available.
+ if _multiprocessing is None: + sys.modules["_multiprocessing"] = types.ModuleType("_multiprocessing") + + def tearDown(self): + if _multiprocessing is None: + sys.modules.pop("_multiprocessing") + def check_all(self, modname): names = {} with warnings_helper.check_warnings( @@ -76,24 +83,16 @@ def walk_modules(self, basedir, modpath): for fn in sorted(os.listdir(basedir)): path = os.path.join(basedir, fn) if os.path.isdir(path): - if fn in SKIP_MODULES: - continue pkg_init = os.path.join(path, '__init__.py') if os.path.exists(pkg_init): yield pkg_init, modpath + fn for p, m in self.walk_modules(path, modpath + fn + "."): yield p, m continue - - if fn == '__init__.py': + if not fn.endswith('.py') or fn == '__init__.py': continue - if not fn.endswith('.py'): - continue - modname = fn.removesuffix('.py') - if modname in SKIP_MODULES: - continue - yield path, modpath + modname - + yield path, modpath + fn[:-3] + # TODO: RUSTPYTHON @unittest.expectedFailure def test_all(self): @@ -104,8 +103,7 @@ def test_all(self): ]) # In case _socket fails to build, make this test fail more gracefully - # than an AttributeError somewhere deep in concurrent.futures, email - # or unittest. + # than an AttributeError somewhere deep in CGIHTTPServer. import _socket ignored = [] @@ -122,14 +120,14 @@ def test_all(self): if denied: continue if support.verbose: - print(f"Check {modname}", flush=True) + print(modname) try: # This heuristic speeds up the process by removing, de facto, # most test modules (and avoiding the auto-executing ones). with open(path, "rb") as f: if b"__all__" not in f.read(): raise NoAll(modname) - self.check_all(modname) + self.check_all(modname) except NoAll: ignored.append(modname) except FailedImport: diff --git a/Lib/test/test_android.py b/Lib/test/test_android.py deleted file mode 100644 index 076190f757..0000000000 --- a/Lib/test/test_android.py +++ /dev/null @@ -1,448 +0,0 @@ -import io -import platform -import queue -import re -import subprocess -import sys -import unittest -from _android_support import TextLogStream -from array import array -from contextlib import ExitStack, contextmanager -from threading import Thread -from test.support import LOOPBACK_TIMEOUT -from time import time -from unittest.mock import patch - - -if sys.platform != "android": - raise unittest.SkipTest("Android-specific") - -api_level = platform.android_ver().api_level - -# (name, level, fileno) -STREAM_INFO = [("stdout", "I", 1), ("stderr", "W", 2)] - - -# Test redirection of stdout and stderr to the Android log. -@unittest.skipIf( - api_level < 23 and platform.machine() == "aarch64", - "SELinux blocks reading logs on older ARM64 emulators" -) -class TestAndroidOutput(unittest.TestCase): - maxDiff = None - - def setUp(self): - self.logcat_process = subprocess.Popen( - ["logcat", "-v", "tag"], stdout=subprocess.PIPE, - errors="backslashreplace" - ) - self.logcat_queue = queue.Queue() - - def logcat_thread(): - for line in self.logcat_process.stdout: - self.logcat_queue.put(line.rstrip("\n")) - self.logcat_process.stdout.close() - self.logcat_thread = Thread(target=logcat_thread) - self.logcat_thread.start() - - from ctypes import CDLL, c_char_p, c_int - android_log_write = getattr(CDLL("liblog.so"), "__android_log_write") - android_log_write.argtypes = (c_int, c_char_p, c_char_p) - ANDROID_LOG_INFO = 4 - - # Separate tests using a marker line with a different tag. 
-        tag, message = "python.test", f"{self.id()} {time()}"
-        android_log_write(
-            ANDROID_LOG_INFO, tag.encode("UTF-8"), message.encode("UTF-8"))
-        self.assert_log("I", tag, message, skip=True, timeout=5)
-
-    def assert_logs(self, level, tag, expected, **kwargs):
-        for line in expected:
-            self.assert_log(level, tag, line, **kwargs)
-
-    def assert_log(self, level, tag, expected, *, skip=False, timeout=0.5):
-        deadline = time() + timeout
-        while True:
-            try:
-                line = self.logcat_queue.get(timeout=(deadline - time()))
-            except queue.Empty:
-                self.fail(f"line not found: {expected!r}")
-            if match := re.fullmatch(fr"(.)/{tag}: (.*)", line):
-                try:
-                    self.assertEqual(level, match[1])
-                    self.assertEqual(expected, match[2])
-                    break
-                except AssertionError:
-                    if not skip:
-                        raise
-
-    def tearDown(self):
-        self.logcat_process.terminate()
-        self.logcat_process.wait(LOOPBACK_TIMEOUT)
-        self.logcat_thread.join(LOOPBACK_TIMEOUT)
-
-    @contextmanager
-    def unbuffered(self, stream):
-        stream.reconfigure(write_through=True)
-        try:
-            yield
-        finally:
-            stream.reconfigure(write_through=False)
-
-    # In --verbose3 mode, sys.stdout and sys.stderr are captured, so we can't
-    # test them directly. Detect this mode and use some temporary streams with
-    # the same properties.
-    def stream_context(self, stream_name, level):
-        # https://developer.android.com/ndk/reference/group/logging
-        prio = {"I": 4, "W": 5}[level]
-
-        stack = ExitStack()
-        stack.enter_context(self.subTest(stream_name))
-        stream = getattr(sys, stream_name)
-        native_stream = getattr(sys, f"__{stream_name}__")
-        if isinstance(stream, io.StringIO):
-            stack.enter_context(
-                patch(
-                    f"sys.{stream_name}",
-                    TextLogStream(
-                        prio, f"python.{stream_name}", native_stream.fileno(),
-                        errors="backslashreplace"
-                    ),
-                )
-            )
-        return stack
-
-    def test_str(self):
-        for stream_name, level, fileno in STREAM_INFO:
-            with self.stream_context(stream_name, level):
-                stream = getattr(sys, stream_name)
-                tag = f"python.{stream_name}"
-                self.assertEqual(f"<TextLogStream '{tag}'>", repr(stream))
-
-                self.assertIs(stream.writable(), True)
-                self.assertIs(stream.readable(), False)
-                self.assertEqual(stream.fileno(), fileno)
-                self.assertEqual("UTF-8", stream.encoding)
-                self.assertEqual("backslashreplace", stream.errors)
-                self.assertIs(stream.line_buffering, True)
-                self.assertIs(stream.write_through, False)
-
-                def write(s, lines=None, *, write_len=None):
-                    if write_len is None:
-                        write_len = len(s)
-                    self.assertEqual(write_len, stream.write(s))
-                    if lines is None:
-                        lines = [s]
-                    self.assert_logs(level, tag, lines)
-
-                # Single-line messages,
-                with self.unbuffered(stream):
-                    write("", [])
-
-                    write("a")
-                    write("Hello")
-                    write("Hello world")
-                    write(" ")
-                    write("  ")
-
-                    # Non-ASCII text
-                    write("ol\u00e9")  # Spanish
-                    write("\u4e2d\u6587")  # Chinese
-
-                    # Non-BMP emoji
-                    write("\U0001f600")
-
-                    # Non-encodable surrogates
-                    write("\ud800\udc00", [r"\ud800\udc00"])
-
-                    # Code used by surrogateescape (which isn't enabled here)
-                    write("\udc80", [r"\udc80"])
-
-                    # Null characters are logged using "modified UTF-8".
-                    write("\u0000", [r"\xc0\x80"])
-                    write("a\u0000", [r"a\xc0\x80"])
-                    write("\u0000b", [r"\xc0\x80b"])
-                    write("a\u0000b", [r"a\xc0\x80b"])
-
-                # Multi-line messages. Avoid identical consecutive lines, as
-                # they may activate "chatty" filtering and break the tests.
- write("\nx", [""]) - write("\na\n", ["x", "a"]) - write("\n", [""]) - write("b\n", ["b"]) - write("c\n\n", ["c", ""]) - write("d\ne", ["d"]) - write("xx", []) - write("f\n\ng", ["exxf", ""]) - write("\n", ["g"]) - - # Since this is a line-based logging system, line buffering - # cannot be turned off, i.e. a newline always causes a flush. - stream.reconfigure(line_buffering=False) - self.assertIs(stream.line_buffering, True) - - # However, buffering can be turned off completely if you want a - # flush after every write. - with self.unbuffered(stream): - write("\nx", ["", "x"]) - write("\na\n", ["", "a"]) - write("\n", [""]) - write("b\n", ["b"]) - write("c\n\n", ["c", ""]) - write("d\ne", ["d", "e"]) - write("xx", ["xx"]) - write("f\n\ng", ["f", "", "g"]) - write("\n", [""]) - - # "\r\n" should be translated into "\n". - write("hello\r\n", ["hello"]) - write("hello\r\nworld\r\n", ["hello", "world"]) - write("\r\n", [""]) - - # Non-standard line separators should be preserved. - write("before form feed\x0cafter form feed\n", - ["before form feed\x0cafter form feed"]) - write("before line separator\u2028after line separator\n", - ["before line separator\u2028after line separator"]) - - # String subclasses are accepted, but they should be converted - # to a standard str without calling any of their methods. - class CustomStr(str): - def splitlines(self, *args, **kwargs): - raise AssertionError() - - def __len__(self): - raise AssertionError() - - def __str__(self): - raise AssertionError() - - write(CustomStr("custom\n"), ["custom"], write_len=7) - - # Non-string classes are not accepted. - for obj in [b"", b"hello", None, 42]: - with self.subTest(obj=obj): - with self.assertRaisesRegex( - TypeError, - fr"write\(\) argument must be str, not " - fr"{type(obj).__name__}" - ): - stream.write(obj) - - # Manual flushing is supported. - write("hello", []) - stream.flush() - self.assert_log(level, tag, "hello") - write("hello", []) - write("world", []) - stream.flush() - self.assert_log(level, tag, "helloworld") - - # Long lines are split into blocks of 1000 characters - # (MAX_CHARS_PER_WRITE in _android_support.py), but - # TextIOWrapper should then join them back together as much as - # possible without exceeding 4000 UTF-8 bytes - # (MAX_BYTES_PER_WRITE). 
-                #
-                # ASCII (1 byte per character)
-                write(("foobar" * 700) + "\n",      # 4200 bytes in
-                      [("foobar" * 666) + "foob",   # 4000 bytes out
-                       "ar" + ("foobar" * 33)])     # 200 bytes out
-
-                # "Full-width" digits 0-9 (3 bytes per character)
-                s = "\uff10\uff11\uff12\uff13\uff14\uff15\uff16\uff17\uff18\uff19"
-                write((s * 150) + "\n",             # 4500 bytes in
-                      [s * 100,                     # 3000 bytes out
-                       s * 50])                     # 1500 bytes out
-
-                s = "0123456789"
-                write(s * 200, [])                  # 2000 bytes in
-                write(s * 150, [])                  # 1500 bytes in
-                write(s * 51, [s * 350])            # 510 bytes in, 3500 bytes out
-                write("\n", [s * 51])               # 0 bytes in, 510 bytes out
-
-    def test_bytes(self):
-        for stream_name, level, fileno in STREAM_INFO:
-            with self.stream_context(stream_name, level):
-                stream = getattr(sys, stream_name).buffer
-                tag = f"python.{stream_name}"
-                self.assertEqual(f"<BinaryLogStream '{tag}'>", repr(stream))
-                self.assertIs(stream.writable(), True)
-                self.assertIs(stream.readable(), False)
-                self.assertEqual(stream.fileno(), fileno)
-
-                def write(b, lines=None, *, write_len=None):
-                    if write_len is None:
-                        write_len = len(b)
-                    self.assertEqual(write_len, stream.write(b))
-                    if lines is None:
-                        lines = [b.decode()]
-                    self.assert_logs(level, tag, lines)
-
-                # Single-line messages,
-                write(b"", [])
-
-                write(b"a")
-                write(b"Hello")
-                write(b"Hello world")
-                write(b" ")
-                write(b"  ")
-
-                # Non-ASCII text
-                write(b"ol\xc3\xa9")  # Spanish
-                write(b"\xe4\xb8\xad\xe6\x96\x87")  # Chinese
-
-                # Non-BMP emoji
-                write(b"\xf0\x9f\x98\x80")
-
-                # Null bytes are logged using "modified UTF-8".
-                write(b"\x00", [r"\xc0\x80"])
-                write(b"a\x00", [r"a\xc0\x80"])
-                write(b"\x00b", [r"\xc0\x80b"])
-                write(b"a\x00b", [r"a\xc0\x80b"])
-
-                # Invalid UTF-8
-                write(b"\xff", [r"\xff"])
-                write(b"a\xff", [r"a\xff"])
-                write(b"\xffb", [r"\xffb"])
-                write(b"a\xffb", [r"a\xffb"])
-
-                # Log entries containing newlines are shown differently by
-                # `logcat -v tag`, `logcat -v long`, and Android Studio. We
-                # currently use `logcat -v tag`, which shows each line as if it
-                # was a separate log entry, but strips a single trailing
-                # newline.
-                #
-                # On newer versions of Android, all three of the above tools (or
-                # maybe Logcat itself) will also strip any number of leading
-                # newlines.
-                write(b"\nx", ["", "x"] if api_level < 30 else ["x"])
-                write(b"\na\n", ["", "a"] if api_level < 30 else ["a"])
-                write(b"\n", [""])
-                write(b"b\n", ["b"])
-                write(b"c\n\n", ["c", ""])
-                write(b"d\ne", ["d", "e"])
-                write(b"xx", ["xx"])
-                write(b"f\n\ng", ["f", "", "g"])
-                write(b"\n", [""])
-
-                # "\r\n" should be translated into "\n".
-                write(b"hello\r\n", ["hello"])
-                write(b"hello\r\nworld\r\n", ["hello", "world"])
-                write(b"\r\n", [""])
-
-                # Other bytes-like objects are accepted.
-                write(bytearray(b"bytearray"))
-
-                mv = memoryview(b"memoryview")
-                write(mv, ["memoryview"])  # Continuous
-                write(mv[::2], ["mmrve"])  # Discontinuous
-
-                write(
-                    # Android only supports little-endian architectures, so the
-                    # bytes representation is as follows:
-                    array("H", [
-                        0,      # 00 00
-                        1,      # 01 00
-                        65534,  # FE FF
-                        65535,  # FF FF
-                    ]),
-
-                    # After encoding null bytes with modified UTF-8, the only
-                    # valid UTF-8 sequence is \x01. All other bytes are handled
-                    # by backslashreplace.
-                    ["\\xc0\\x80\\xc0\\x80"
-                     "\x01\\xc0\\x80"
-                     "\\xfe\\xff"
-                     "\\xff\\xff"],
-                    write_len=8,
-                )
-
-                # Non-bytes-like classes are not accepted.
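The byte counts in the cases above follow one greedy rule: a single log write carries at most MAX_BYTES_PER_WRITE (4000) UTF-8 bytes, and a character is never split across writes. A rough illustration of that grouping arithmetic, as a hypothetical helper (the real splitting lives in TextIOWrapper and `_android_support`); the rejection of non-bytes-like objects continues just below:

    def chunk_utf8(text, max_bytes=4000):
        # Group characters greedily so each emitted chunk stays within
        # max_bytes when UTF-8 encoded, without ever splitting a character.
        chunk, size = [], 0
        for ch in text:
            n = len(ch.encode("utf-8"))
            if size + n > max_bytes and chunk:
                yield "".join(chunk)
                chunk, size = [], 0
            chunk.append(ch)
            size += n
        if chunk:
            yield "".join(chunk)

    # 700 * "foobar" is 4200 ASCII bytes: a 4000-byte chunk, then a 200-byte one.
    assert [len(c) for c in chunk_utf8("foobar" * 700)] == [4000, 200]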
- for obj in ["", "hello", None, 42]: - with self.subTest(obj=obj): - with self.assertRaisesRegex( - TypeError, - fr"write\(\) argument must be bytes-like, not " - fr"{type(obj).__name__}" - ): - stream.write(obj) - - -class TestAndroidRateLimit(unittest.TestCase): - def test_rate_limit(self): - # https://cs.android.com/android/platform/superproject/+/android-14.0.0_r1:system/logging/liblog/include/log/log_read.h;l=39 - PER_MESSAGE_OVERHEAD = 28 - - # https://developer.android.com/ndk/reference/group/logging - ANDROID_LOG_DEBUG = 3 - - # To avoid flooding the test script output, use a different tag rather - # than stdout or stderr. - tag = "python.rate_limit" - stream = TextLogStream(ANDROID_LOG_DEBUG, tag) - - # Make a test message which consumes 1 KB of the logcat buffer. - message = "Line {:03d} " - message += "." * ( - 1024 - PER_MESSAGE_OVERHEAD - len(tag) - len(message.format(0)) - ) + "\n" - - # To avoid depending on the performance of the test device, we mock the - # passage of time. - mock_now = time() - - def mock_time(): - # Avoid division by zero by simulating a small delay. - mock_sleep(0.0001) - return mock_now - - def mock_sleep(duration): - nonlocal mock_now - mock_now += duration - - # See _android_support.py. The default values of these parameters work - # well across a wide range of devices, but we'll use smaller values to - # ensure a quick and reliable test that doesn't flood the log too much. - MAX_KB_PER_SECOND = 100 - BUCKET_KB = 10 - with ( - patch("_android_support.MAX_BYTES_PER_SECOND", MAX_KB_PER_SECOND * 1024), - patch("_android_support.BUCKET_SIZE", BUCKET_KB * 1024), - patch("_android_support.sleep", mock_sleep), - patch("_android_support.time", mock_time), - ): - # Make sure the token bucket is full. - stream.write("Initial message to reset _prev_write_time") - mock_sleep(BUCKET_KB / MAX_KB_PER_SECOND) - line_num = 0 - - # Write BUCKET_KB messages, and return the rate at which they were - # accepted in KB per second. - def write_bucketful(): - nonlocal line_num - start = mock_time() - max_line_num = line_num + BUCKET_KB - while line_num < max_line_num: - stream.write(message.format(line_num)) - line_num += 1 - return BUCKET_KB / (mock_time() - start) - - # The first bucketful should be written with minimal delay. The - # factor of 2 here is not arbitrary: it verifies that the system can - # write fast enough to empty the bucket within two bucketfuls, which - # the next part of the test depends on. - self.assertGreater(write_bucketful(), MAX_KB_PER_SECOND * 2) - - # Write another bucketful to empty the token bucket completely. - write_bucketful() - - # The next bucketful should be written at the rate limit. - self.assertAlmostEqual( - write_bucketful(), MAX_KB_PER_SECOND, - delta=MAX_KB_PER_SECOND * 0.1 - ) - - # Once the token bucket refills, we should go back to full speed. 
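(The final refill assertion follows below.) The mocked clock exercises a classic token bucket: capacity BUCKET_SIZE bytes, refilled at MAX_BYTES_PER_SECOND, with writers sleeping once the bucket runs dry. A simplified sketch of the idea, not the `_android_support` implementation:

    from time import monotonic, sleep

    class TokenBucket:
        """Allow bursts up to `capacity` bytes, refilling at `rate` bytes/sec."""

        def __init__(self, rate, capacity):
            self.rate = rate
            self.capacity = capacity
            self.tokens = capacity      # start with a full bucket
            self.last = monotonic()

        def spend(self, nbytes):
            now = monotonic()
            # Refill in proportion to elapsed time, capped at the bucket size.
            self.tokens = min(self.capacity,
                              self.tokens + (now - self.last) * self.rate)
            self.last = now
            deficit = nbytes - self.tokens
            if deficit > 0:
                sleep(deficit / self.rate)  # throttle until tokens refill
                self.tokens = 0.0
            else:
                self.tokens -= nbytes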
-            mock_sleep(BUCKET_KB / MAX_KB_PER_SECOND)
-            self.assertGreater(write_bucketful(), MAX_KB_PER_SECOND * 2)
diff --git a/Lib/test/test_baseexception.py b/Lib/test/test_baseexception.py
index e19162a6ab..a73711c416 100644
--- a/Lib/test/test_baseexception.py
+++ b/Lib/test/test_baseexception.py
@@ -79,10 +79,9 @@ def test_inheritance(self):
         finally:
             inheritance_tree.close()
 
-        # Underscore-prefixed (private) exceptions don't need to be documented
-        exc_set = set(e for e in exc_set if not e.startswith('_'))
         # RUSTPYTHON specific
         exc_set.discard("JitError")
+
         self.assertEqual(len(exc_set), 0, "%s not accounted for" % exc_set)
 
 interface_tests = ("length", "args", "str", "repr")
@@ -136,7 +135,7 @@ class Value(str):
 
             d[HashThisKeyWillClearTheDict()] = Value()  # refcount of Value() is 1 now
 
-        # Exception.__setstate__ should acquire a strong reference of key and
+        # Exception.__setstate__ should acquire a strong reference of key and
         # value in the dict. Otherwise, Value()'s refcount would go below
         # zero in the tp_hash call in PyObject_SetAttr(), and it would cause
         # crash in GC.
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index b716d6016b..1f0b9adc36 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -676,8 +676,6 @@ def testCompress4G(self, size):
         finally:
             data = None
 
-    # TODO: RUSTPYTHON
-    @unittest.expectedFailure
     def testPickle(self):
         for proto in range(pickle.HIGHEST_PROTOCOL + 1):
             with self.assertRaises(TypeError):
@@ -736,8 +734,6 @@ def testDecompress4G(self, size):
         compressed = None
         decompressed = None
 
-    # TODO: RUSTPYTHON
-    @unittest.expectedFailure
     def testPickle(self):
         for proto in range(pickle.HIGHEST_PROTOCOL + 1):
             with self.assertRaises(TypeError):
@@ -1005,8 +1001,6 @@ def test_encoding_error_handler(self):
                 as f:
             self.assertEqual(f.read(), "foobar")
 
-    # TODO: RUSTPYTHON
-    @unittest.expectedFailure
     def test_newline(self):
         # Test with explicit newline (universal newline mode disabled).
text = self.TEXT.decode("ascii") diff --git a/Lib/test/test_heapq.py b/Lib/test/test_heapq.py index 1aa8e4e289..cb1e4505b0 100644 --- a/Lib/test/test_heapq.py +++ b/Lib/test/test_heapq.py @@ -4,6 +4,7 @@ import unittest import doctest +from test import support from test.support import import_helper from unittest import TestCase, skipUnless from operator import itemgetter diff --git a/Lib/test/test_linecache.py b/Lib/test/test_linecache.py index e23e1cc942..72dd40136c 100644 --- a/Lib/test/test_linecache.py +++ b/Lib/test/test_linecache.py @@ -5,10 +5,8 @@ import os.path import tempfile import tokenize -from importlib.machinery import ModuleSpec from test import support from test.support import os_helper -from test.support.script_helper import assert_python_ok FILENAME = linecache.__file__ @@ -84,10 +82,6 @@ def test_getlines(self): class EmptyFile(GetLineTestsGoodData, unittest.TestCase): file_list = [] - def test_getlines(self): - lines = linecache.getlines(self.file_name) - self.assertEqual(lines, ['\n']) - class SingleEmptyLine(GetLineTestsGoodData, unittest.TestCase): file_list = ['\n'] @@ -103,16 +97,6 @@ class BadUnicode_WithDeclaration(GetLineTestsBadData, unittest.TestCase): file_byte_string = b'# coding=utf-8\n\x80abc' -class FakeLoader: - def get_source(self, fullname): - return f'source for {fullname}' - - -class NoSourceLoader: - def get_source(self, fullname): - return None - - class LineCacheTests(unittest.TestCase): def test_getline(self): @@ -254,70 +238,6 @@ def raise_memoryerror(*args, **kwargs): self.assertEqual(lines3, []) self.assertEqual(linecache.getlines(FILENAME), lines) - def test_loader(self): - filename = 'scheme://path' - - for loader in (None, object(), NoSourceLoader()): - linecache.clearcache() - module_globals = {'__name__': 'a.b.c', '__loader__': loader} - self.assertEqual(linecache.getlines(filename, module_globals), []) - - linecache.clearcache() - module_globals = {'__name__': 'a.b.c', '__loader__': FakeLoader()} - self.assertEqual(linecache.getlines(filename, module_globals), - ['source for a.b.c\n']) - - for spec in (None, object(), ModuleSpec('', FakeLoader())): - linecache.clearcache() - module_globals = {'__name__': 'a.b.c', '__loader__': FakeLoader(), - '__spec__': spec} - self.assertEqual(linecache.getlines(filename, module_globals), - ['source for a.b.c\n']) - - linecache.clearcache() - spec = ModuleSpec('x.y.z', FakeLoader()) - module_globals = {'__name__': 'a.b.c', '__loader__': spec.loader, - '__spec__': spec} - self.assertEqual(linecache.getlines(filename, module_globals), - ['source for x.y.z\n']) - - def test_invalid_names(self): - for name, desc in [ - ('\x00', 'NUL bytes filename'), - (__file__ + '\x00', 'filename with embedded NUL bytes'), - # A filename with surrogate codes. A UnicodeEncodeError is raised - # by os.stat() upon querying, which is a subclass of ValueError. - ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), - # For POSIX platforms, an OSError will be raised but for Windows - # platforms, a ValueError is raised due to the path_t converter. - # See: https://github.com/python/cpython/issues/122170 - ('a' * 1_000_000, 'very long filename'), - ]: - with self.subTest(f'updatecache: {desc}'): - linecache.clearcache() - lines = linecache.updatecache(name) - self.assertListEqual(lines, []) - self.assertNotIn(name, linecache.cache) - - # hack into the cache (it shouldn't be allowed - # but we never know what people do...) 
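The deleted `test_loader` above pins down a documented `linecache` behaviour: when a filename cannot be read from disk, `getlines()` falls back to the PEP 302 `get_source()` protocol via the module's `__loader__` (or `__spec__`). A small self-contained demonstration against the real API, mirroring the deleted test (the cache-poisoning sweep of `checkcache` continues below):

    import linecache

    class FakeLoader:
        def get_source(self, fullname):
            return f"source for {fullname}\n"

    # "scheme://path" is not a readable file, so linecache asks the loader.
    module_globals = {"__name__": "a.b.c", "__loader__": FakeLoader()}
    print(linecache.getlines("scheme://path", module_globals))
    # -> ['source for a.b.c\n']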
-        for key, fullname in [(name, 'ok'), ('key', name), (name, name)]:
-            with self.subTest(f'checkcache: {desc}',
-                              key=key, fullname=fullname):
-                linecache.clearcache()
-                linecache.cache[key] = (0, 1234, [], fullname)
-                linecache.checkcache(key)
-                self.assertNotIn(key, linecache.cache)
-
-        # just to be sure that we did not mess with cache
-        linecache.clearcache()
-
-    def test_linecache_python_string(self):
-        cmdline = "import linecache;assert len(linecache.cache) == 0"
-        retcode, stdout, stderr = assert_python_ok('-c', cmdline)
-        self.assertEqual(retcode, 0)
-        self.assertEqual(stdout, b'')
-        self.assertEqual(stderr, b'')
 
 class LineCacheInvalidationTests(unittest.TestCase):
     def setUp(self):
diff --git a/Lib/test/test_pprint.py b/Lib/test/test_pprint.py
index 4e6fed1ab9..6ea7e7db2c 100644
--- a/Lib/test/test_pprint.py
+++ b/Lib/test/test_pprint.py
@@ -7,8 +7,8 @@
 import itertools
 import pprint
 import random
-import re
 import test.support
+import test.test_set
 import types
 import unittest
 
@@ -535,10 +535,7 @@ def test_dataclass_with_repr(self):
     def test_dataclass_no_repr(self):
         dc = dataclass3()
         formatted = pprint.pformat(dc, width=10)
-        self.assertRegex(
-            formatted,
-            fr"<{re.escape(__name__)}.dataclass3 object at \w+>",
-        )
+        self.assertRegex(formatted, r"<test.test_pprint.dataclass3 object at \w+>")
 
     def test_recursive_dataclass(self):
         dc = dataclass4(None)
@@ -622,6 +619,9 @@ def test_set_reprs(self):
         self.assertEqual(pprint.pformat(frozenset3(range(7)), width=20),
                          'frozenset3({0, 1, 2, 3, 4, 5, 6})')
 
+    @unittest.expectedFailure
+    #See http://bugs.python.org/issue13907
+    @test.support.cpython_only
     def test_set_of_sets_reprs(self):
         # This test creates a complex arrangement of frozensets and
         # compares the pretty-printed repr against a string hard-coded in
@@ -632,106 +632,204 @@ def test_set_of_sets_reprs(self):
         # partial ordering (subset relationships), the output of the
         # list.sort() method is undefined for lists of sets."
         #
+        # In a nutshell, the test assumes frozenset({0}) will always
+        # sort before frozenset({1}), but:
+        #
         # >>> frozenset({0}) < frozenset({1})
         # False
         # >>> frozenset({1}) < frozenset({0})
         # False
         #
-        # In this test we list all possible invariants of the result
-        # for unordered frozensets.
- # - # This test has a long history, see: - # - https://github.com/python/cpython/commit/969fe57baa0eb80332990f9cda936a33e13fabef - # - https://github.com/python/cpython/issues/58115 - # - https://github.com/python/cpython/issues/111147 - - import textwrap - - # Single-line, always ordered: - fs0 = frozenset() - fs1 = frozenset(('abc', 'xyz')) - data = frozenset((fs0, fs1)) - self.assertEqual(pprint.pformat(data), - 'frozenset({%r, %r})' % (fs0, fs1)) - self.assertEqual(pprint.pformat(data), repr(data)) - - fs2 = frozenset(('one', 'two')) - data = {fs2: frozenset((fs0, fs1))} - self.assertEqual(pprint.pformat(data), - "{%r: frozenset({%r, %r})}" % (fs2, fs0, fs1)) - self.assertEqual(pprint.pformat(data), repr(data)) - - # Single-line, unordered: - fs1 = frozenset(("xyz", "qwerty")) - fs2 = frozenset(("abcd", "spam")) - fs = frozenset((fs1, fs2)) - self.assertEqual(pprint.pformat(fs), repr(fs)) - - # Multiline, unordered: - def check(res, invariants): - self.assertIn(res, [textwrap.dedent(i).strip() for i in invariants]) - - # Inner-most frozensets are singleline, result is multiline, unordered: - fs1 = frozenset(('regular string', 'other string')) - fs2 = frozenset(('third string', 'one more string')) - check( - pprint.pformat(frozenset((fs1, fs2))), - [ - """ - frozenset({%r, - %r}) - """ % (fs1, fs2), - """ - frozenset({%r, - %r}) - """ % (fs2, fs1), - ], - ) - - # Everything is multiline, unordered: - check( - pprint.pformat( - frozenset(( - frozenset(( - "xyz very-very long string", - "qwerty is also absurdly long", - )), - frozenset(( - "abcd is even longer that before", - "spam is not so long", - )), - )), - ), - [ - """ - frozenset({frozenset({'abcd is even longer that before', - 'spam is not so long'}), - frozenset({'qwerty is also absurdly long', - 'xyz very-very long string'})}) - """, - - """ - frozenset({frozenset({'abcd is even longer that before', - 'spam is not so long'}), - frozenset({'xyz very-very long string', - 'qwerty is also absurdly long'})}) - """, - - """ - frozenset({frozenset({'qwerty is also absurdly long', - 'xyz very-very long string'}), - frozenset({'abcd is even longer that before', - 'spam is not so long'})}) - """, - - """ - frozenset({frozenset({'qwerty is also absurdly long', - 'xyz very-very long string'}), - frozenset({'spam is not so long', - 'abcd is even longer that before'})}) - """, - ], - ) + # Consequently, this test is fragile and + # implementation-dependent. Small changes to Python's sort + # algorithm cause the test to fail when it should pass. + # XXX Or changes to the dictionary implementation... 
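The restored comments are easy to verify interactively: `<` on sets means "is a proper subset", so two disjoint frozensets are mutually unordered, and Python's stable sort then leaves them in whatever order they arrived:

    >>> frozenset({0}) < frozenset({1})    # not a proper subset
    False
    >>> frozenset({1}) < frozenset({0})    # nor the other way around
    False
    >>> # sort is stable, so incomparable elements keep their input order:
    >>> sorted([frozenset({1}), frozenset({0})])
    [frozenset({1}), frozenset({0})]

This is why any hard-coded pformat target for nested sets, like the one below, is fragile.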
+ + cube_repr_tgt = """\ +{frozenset(): frozenset({frozenset({2}), frozenset({0}), frozenset({1})}), + frozenset({0}): frozenset({frozenset(), + frozenset({0, 2}), + frozenset({0, 1})}), + frozenset({1}): frozenset({frozenset(), + frozenset({1, 2}), + frozenset({0, 1})}), + frozenset({2}): frozenset({frozenset(), + frozenset({1, 2}), + frozenset({0, 2})}), + frozenset({1, 2}): frozenset({frozenset({2}), + frozenset({1}), + frozenset({0, 1, 2})}), + frozenset({0, 2}): frozenset({frozenset({2}), + frozenset({0}), + frozenset({0, 1, 2})}), + frozenset({0, 1}): frozenset({frozenset({0}), + frozenset({1}), + frozenset({0, 1, 2})}), + frozenset({0, 1, 2}): frozenset({frozenset({1, 2}), + frozenset({0, 2}), + frozenset({0, 1})})}""" + cube = test.test_set.cube(3) + self.assertEqual(pprint.pformat(cube), cube_repr_tgt) + cubo_repr_tgt = """\ +{frozenset({frozenset({0, 2}), frozenset({0})}): frozenset({frozenset({frozenset({0, + 2}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({0}), + frozenset({0, + 1})}), + frozenset({frozenset(), + frozenset({0})}), + frozenset({frozenset({2}), + frozenset({0, + 2})})}), + frozenset({frozenset({0, 1}), frozenset({1})}): frozenset({frozenset({frozenset({0, + 1}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({0}), + frozenset({0, + 1})}), + frozenset({frozenset({1}), + frozenset({1, + 2})}), + frozenset({frozenset(), + frozenset({1})})}), + frozenset({frozenset({1, 2}), frozenset({1})}): frozenset({frozenset({frozenset({1, + 2}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({2}), + frozenset({1, + 2})}), + frozenset({frozenset(), + frozenset({1})}), + frozenset({frozenset({1}), + frozenset({0, + 1})})}), + frozenset({frozenset({1, 2}), frozenset({2})}): frozenset({frozenset({frozenset({1, + 2}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({1}), + frozenset({1, + 2})}), + frozenset({frozenset({2}), + frozenset({0, + 2})}), + frozenset({frozenset(), + frozenset({2})})}), + frozenset({frozenset(), frozenset({0})}): frozenset({frozenset({frozenset({0}), + frozenset({0, + 1})}), + frozenset({frozenset({0}), + frozenset({0, + 2})}), + frozenset({frozenset(), + frozenset({1})}), + frozenset({frozenset(), + frozenset({2})})}), + frozenset({frozenset(), frozenset({1})}): frozenset({frozenset({frozenset(), + frozenset({0})}), + frozenset({frozenset({1}), + frozenset({1, + 2})}), + frozenset({frozenset(), + frozenset({2})}), + frozenset({frozenset({1}), + frozenset({0, + 1})})}), + frozenset({frozenset({2}), frozenset()}): frozenset({frozenset({frozenset({2}), + frozenset({1, + 2})}), + frozenset({frozenset(), + frozenset({0})}), + frozenset({frozenset(), + frozenset({1})}), + frozenset({frozenset({2}), + frozenset({0, + 2})})}), + frozenset({frozenset({0, 1, 2}), frozenset({0, 1})}): frozenset({frozenset({frozenset({1, + 2}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({0, + 2}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({0}), + frozenset({0, + 1})}), + frozenset({frozenset({1}), + frozenset({0, + 1})})}), + frozenset({frozenset({0}), frozenset({0, 1})}): frozenset({frozenset({frozenset(), + frozenset({0})}), + frozenset({frozenset({0, + 1}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({0}), + frozenset({0, + 2})}), + frozenset({frozenset({1}), + frozenset({0, + 1})})}), + frozenset({frozenset({2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({0, + 2}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({2}), + frozenset({1, + 2})}), + frozenset({frozenset({0}), + frozenset({0, + 2})}), + 
frozenset({frozenset(), + frozenset({2})})}), + frozenset({frozenset({0, 1, 2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({1, + 2}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({0, + 1}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({0}), + frozenset({0, + 2})}), + frozenset({frozenset({2}), + frozenset({0, + 2})})}), + frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}): frozenset({frozenset({frozenset({0, + 2}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({0, + 1}), + frozenset({0, + 1, + 2})}), + frozenset({frozenset({2}), + frozenset({1, + 2})}), + frozenset({frozenset({1}), + frozenset({1, + 2})})})}""" + + cubo = test.test_set.linegraph(cube) + self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt) def test_depth(self): nested_tuple = (1, (2, (3, (4, (5, 6))))) diff --git a/Lib/test/test_queue.py b/Lib/test/test_queue.py index 93cbe1fe23..cfa6003a86 100644 --- a/Lib/test/test_queue.py +++ b/Lib/test/test_queue.py @@ -2,7 +2,6 @@ # to ensure the Queue locks remain stable. import itertools import random -import sys import threading import time import unittest @@ -11,8 +10,6 @@ from test.support import import_helper from test.support import threading_helper -# queue module depends on threading primitives -threading_helper.requires_working_threading(module=True) py_queue = import_helper.import_fresh_module('queue', blocked=['_queue']) c_queue = import_helper.import_fresh_module('queue', fresh=['_queue']) @@ -242,418 +239,6 @@ def test_shrinking_queue(self): with self.assertRaises(self.queue.Full): q.put_nowait(4) - def test_shutdown_empty(self): - q = self.type2test() - q.shutdown() - with self.assertRaises(self.queue.ShutDown): - q.put("data") - with self.assertRaises(self.queue.ShutDown): - q.get() - - def test_shutdown_nonempty(self): - q = self.type2test() - q.put("data") - q.shutdown() - q.get() - with self.assertRaises(self.queue.ShutDown): - q.get() - - def test_shutdown_immediate(self): - q = self.type2test() - q.put("data") - q.shutdown(immediate=True) - with self.assertRaises(self.queue.ShutDown): - q.get() - - def test_shutdown_allowed_transitions(self): - # allowed transitions would be from alive via shutdown to immediate - q = self.type2test() - self.assertFalse(q.is_shutdown) - - q.shutdown() - self.assertTrue(q.is_shutdown) - - q.shutdown(immediate=True) - self.assertTrue(q.is_shutdown) - - q.shutdown(immediate=False) - - def _shutdown_all_methods_in_one_thread(self, immediate): - q = self.type2test(2) - q.put("L") - q.put_nowait("O") - q.shutdown(immediate) - - with self.assertRaises(self.queue.ShutDown): - q.put("E") - with self.assertRaises(self.queue.ShutDown): - q.put_nowait("W") - if immediate: - with self.assertRaises(self.queue.ShutDown): - q.get() - with self.assertRaises(self.queue.ShutDown): - q.get_nowait() - with self.assertRaises(ValueError): - q.task_done() - q.join() - else: - self.assertIn(q.get(), "LO") - q.task_done() - self.assertIn(q.get(), "LO") - q.task_done() - q.join() - # on shutdown(immediate=False) - # when queue is empty, should raise ShutDown Exception - with self.assertRaises(self.queue.ShutDown): - q.get() # p.get(True) - with self.assertRaises(self.queue.ShutDown): - q.get_nowait() # p.get(False) - with self.assertRaises(self.queue.ShutDown): - q.get(True, 1.0) - - def test_shutdown_all_methods_in_one_thread(self): - return self._shutdown_all_methods_in_one_thread(False) - - def test_shutdown_immediate_all_methods_in_one_thread(self): - return self._shutdown_all_methods_in_one_thread(True) - - def 
_write_msg_thread(self, q, n, results, - i_when_exec_shutdown, event_shutdown, - barrier_start): - # All `write_msg_threads` - # put several items into the queue. - for i in range(0, i_when_exec_shutdown//2): - q.put((i, 'LOYD')) - # Wait for the barrier to be complete. - barrier_start.wait() - - for i in range(i_when_exec_shutdown//2, n): - try: - q.put((i, "YDLO")) - except self.queue.ShutDown: - results.append(False) - break - - # Trigger queue shutdown. - if i == i_when_exec_shutdown: - # Only one thread should call shutdown(). - if not event_shutdown.is_set(): - event_shutdown.set() - results.append(True) - - def _read_msg_thread(self, q, results, barrier_start): - # Get at least one item. - q.get(True) - q.task_done() - # Wait for the barrier to be complete. - barrier_start.wait() - while True: - try: - q.get(False) - q.task_done() - except self.queue.ShutDown: - results.append(True) - break - except self.queue.Empty: - pass - - def _shutdown_thread(self, q, results, event_end, immediate): - event_end.wait() - q.shutdown(immediate) - results.append(q.qsize() == 0) - - def _join_thread(self, q, barrier_start): - # Wait for the barrier to be complete. - barrier_start.wait() - q.join() - - def _shutdown_all_methods_in_many_threads(self, immediate): - # Run a 'multi-producers/consumers queue' use case, - # with enough items into the queue. - # When shutdown, all running threads will be joined. - q = self.type2test() - ps = [] - res_puts = [] - res_gets = [] - res_shutdown = [] - write_threads = 4 - read_threads = 6 - join_threads = 2 - nb_msgs = 1024*64 - nb_msgs_w = nb_msgs // write_threads - when_exec_shutdown = nb_msgs_w // 2 - # Use of a Barrier to ensure that - # - all write threads put all their items into the queue, - # - all read thread get at least one item from the queue, - # and keep on running until shutdown. - # The join thread is started only when shutdown is immediate. - nparties = write_threads + read_threads - if immediate: - nparties += join_threads - barrier_start = threading.Barrier(nparties) - ev_exec_shutdown = threading.Event() - lprocs = [ - (self._write_msg_thread, write_threads, (q, nb_msgs_w, res_puts, - when_exec_shutdown, ev_exec_shutdown, - barrier_start)), - (self._read_msg_thread, read_threads, (q, res_gets, barrier_start)), - (self._shutdown_thread, 1, (q, res_shutdown, ev_exec_shutdown, immediate)), - ] - if immediate: - lprocs.append((self._join_thread, join_threads, (q, barrier_start))) - # start all threads. 
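These deleted cases exercise the `Queue.shutdown()` protocol added in Python 3.13: after a plain shutdown, `put()` raises `ShutDown` while `get()` may still drain already-queued items; with `immediate=True`, pending items are discarded and `get()` raises at once. A compact usage sketch of the non-immediate path (the loop that starts and joins the worker threads follows):

    import queue

    q = queue.Queue()
    q.put("job-1")
    q.put("job-2")

    q.shutdown()                 # non-immediate: producers fail, consumers drain
    try:
        q.put("job-3")
    except queue.ShutDown:
        pass                     # new work is refused

    print(q.get())               # "job-1" -- draining still works
    print(q.get())               # "job-2"
    try:
        q.get()
    except queue.ShutDown:
        print("queue drained and shut down")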
- for func, n, args in lprocs: - for i in range(n): - ps.append(threading.Thread(target=func, args=args)) - ps[-1].start() - for thread in ps: - thread.join() - - self.assertTrue(True in res_puts) - self.assertEqual(res_gets.count(True), read_threads) - if immediate: - self.assertListEqual(res_shutdown, [True]) - self.assertTrue(q.empty()) - - def test_shutdown_all_methods_in_many_threads(self): - return self._shutdown_all_methods_in_many_threads(False) - - def test_shutdown_immediate_all_methods_in_many_threads(self): - return self._shutdown_all_methods_in_many_threads(True) - - def _get(self, q, go, results, shutdown=False): - go.wait() - try: - msg = q.get() - results.append(not shutdown) - return not shutdown - except self.queue.ShutDown: - results.append(shutdown) - return shutdown - - def _get_shutdown(self, q, go, results): - return self._get(q, go, results, True) - - def _get_task_done(self, q, go, results): - go.wait() - try: - msg = q.get() - q.task_done() - results.append(True) - return msg - except self.queue.ShutDown: - results.append(False) - return False - - def _put(self, q, msg, go, results, shutdown=False): - go.wait() - try: - q.put(msg) - results.append(not shutdown) - return not shutdown - except self.queue.ShutDown: - results.append(shutdown) - return shutdown - - def _put_shutdown(self, q, msg, go, results): - return self._put(q, msg, go, results, True) - - def _join(self, q, results, shutdown=False): - try: - q.join() - results.append(not shutdown) - return not shutdown - except self.queue.ShutDown: - results.append(shutdown) - return shutdown - - def _join_shutdown(self, q, results): - return self._join(q, results, True) - - def _shutdown_get(self, immediate): - q = self.type2test(2) - results = [] - go = threading.Event() - q.put("Y") - q.put("D") - # queue full - - if immediate: - thrds = ( - (self._get_shutdown, (q, go, results)), - (self._get_shutdown, (q, go, results)), - ) - else: - thrds = ( - # on shutdown(immediate=False) - # one of these threads should raise Shutdown - (self._get, (q, go, results)), - (self._get, (q, go, results)), - (self._get, (q, go, results)), - ) - threads = [] - for func, params in thrds: - threads.append(threading.Thread(target=func, args=params)) - threads[-1].start() - q.shutdown(immediate) - go.set() - for t in threads: - t.join() - if immediate: - self.assertListEqual(results, [True, True]) - else: - self.assertListEqual(sorted(results), [False] + [True]*(len(thrds)-1)) - - def test_shutdown_get(self): - return self._shutdown_get(False) - - def test_shutdown_immediate_get(self): - return self._shutdown_get(True) - - def _shutdown_put(self, immediate): - q = self.type2test(2) - results = [] - go = threading.Event() - q.put("Y") - q.put("D") - # queue fulled - - thrds = ( - (self._put_shutdown, (q, "E", go, results)), - (self._put_shutdown, (q, "W", go, results)), - ) - threads = [] - for func, params in thrds: - threads.append(threading.Thread(target=func, args=params)) - threads[-1].start() - q.shutdown() - go.set() - for t in threads: - t.join() - - self.assertEqual(results, [True]*len(thrds)) - - def test_shutdown_put(self): - return self._shutdown_put(False) - - def test_shutdown_immediate_put(self): - return self._shutdown_put(True) - - def _shutdown_join(self, immediate): - q = self.type2test() - results = [] - q.put("Y") - go = threading.Event() - nb = q.qsize() - - thrds = ( - (self._join, (q, results)), - (self._join, (q, results)), - ) - threads = [] - for func, params in thrds: - 
threads.append(threading.Thread(target=func, args=params)) - threads[-1].start() - if not immediate: - res = [] - for i in range(nb): - threads.append(threading.Thread(target=self._get_task_done, args=(q, go, res))) - threads[-1].start() - q.shutdown(immediate) - go.set() - for t in threads: - t.join() - - self.assertEqual(results, [True]*len(thrds)) - - def test_shutdown_immediate_join(self): - return self._shutdown_join(True) - - def test_shutdown_join(self): - return self._shutdown_join(False) - - def _shutdown_put_join(self, immediate): - q = self.type2test(2) - results = [] - go = threading.Event() - q.put("Y") - # queue not fulled - - thrds = ( - (self._put_shutdown, (q, "E", go, results)), - (self._join, (q, results)), - ) - threads = [] - for func, params in thrds: - threads.append(threading.Thread(target=func, args=params)) - threads[-1].start() - self.assertEqual(q.unfinished_tasks, 1) - - q.shutdown(immediate) - go.set() - - if immediate: - with self.assertRaises(self.queue.ShutDown): - q.get_nowait() - else: - result = q.get() - self.assertEqual(result, "Y") - q.task_done() - - for t in threads: - t.join() - - self.assertEqual(results, [True]*len(thrds)) - - def test_shutdown_immediate_put_join(self): - return self._shutdown_put_join(True) - - def test_shutdown_put_join(self): - return self._shutdown_put_join(False) - - def test_shutdown_get_task_done_join(self): - q = self.type2test(2) - results = [] - go = threading.Event() - q.put("Y") - q.put("D") - self.assertEqual(q.unfinished_tasks, q.qsize()) - - thrds = ( - (self._get_task_done, (q, go, results)), - (self._get_task_done, (q, go, results)), - (self._join, (q, results)), - (self._join, (q, results)), - ) - threads = [] - for func, params in thrds: - threads.append(threading.Thread(target=func, args=params)) - threads[-1].start() - go.set() - q.shutdown(False) - for t in threads: - t.join() - - self.assertEqual(results, [True]*len(thrds)) - - def test_shutdown_pending_get(self): - def get(): - try: - results.append(q.get()) - except Exception as e: - results.append(e) - - q = self.type2test() - results = [] - get_thread = threading.Thread(target=get) - get_thread.start() - q.shutdown(immediate=False) - get_thread.join(timeout=10.0) - self.assertFalse(get_thread.is_alive()) - self.assertEqual(len(results), 1) - self.assertIsInstance(results[0], self.queue.ShutDown) - - class QueueTest(BaseQueueTestMixin): def setUp(self): @@ -704,7 +289,6 @@ class CPriorityQueueTest(PriorityQueueTest, unittest.TestCase): # A Queue subclass that can provoke failure at a moment's notice :) class FailingQueueException(Exception): pass - class FailingQueueTest(BlockingTestMixin): def setUp(self): diff --git a/Lib/test/test_sched.py b/Lib/test/test_sched.py index eb52ac7983..7ae7baae85 100644 --- a/Lib/test/test_sched.py +++ b/Lib/test/test_sched.py @@ -58,7 +58,6 @@ def test_enterabs(self): scheduler.run() self.assertEqual(l, [0.01, 0.02, 0.03, 0.04, 0.05]) - @threading_helper.requires_working_threading() def test_enter_concurrent(self): q = queue.Queue() fun = q.put @@ -92,23 +91,10 @@ def test_priority(self): l = [] fun = lambda x: l.append(x) scheduler = sched.scheduler(time.time, time.sleep) - - cases = [ - ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), - ([5, 4, 3, 2, 1], [1, 2, 3, 4, 5]), - ([2, 5, 3, 1, 4], [1, 2, 3, 4, 5]), - ([1, 2, 3, 2, 1], [1, 1, 2, 2, 3]), - ] - for priorities, expected in cases: - with self.subTest(priorities=priorities, expected=expected): - for priority in priorities: - scheduler.enterabs(0.01, priority, fun, (priority,)) 
- scheduler.run() - self.assertEqual(l, expected) - - # Cleanup: - self.assertTrue(scheduler.empty()) - l.clear() + for priority in [1, 2, 3, 4, 5]: + z = scheduler.enterabs(0.01, priority, fun, (priority,)) + scheduler.run() + self.assertEqual(l, [1, 2, 3, 4, 5]) def test_cancel(self): l = [] @@ -125,7 +111,6 @@ def test_cancel(self): scheduler.run() self.assertEqual(l, [0.02, 0.03, 0.04]) - @threading_helper.requires_working_threading() def test_cancel_concurrent(self): q = queue.Queue() fun = q.put diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index b64ccb37a5..16416547c1 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -2000,9 +2000,13 @@ def check_unpack_tarball(self, format): ('Python 3.14', DeprecationWarning)): self.check_unpack_archive(format) + # TODO: RUSTPYTHON + @unittest.expectedFailure def test_unpack_archive_tar(self): self.check_unpack_tarball('tar') + # TODO: RUSTPYTHON + @unittest.expectedFailure @support.requires_zlib() def test_unpack_archive_gztar(self): self.check_unpack_tarball('gztar') diff --git a/Lib/test/test_wave.py b/Lib/test/test_wave.py deleted file mode 100644 index 5e771c8de9..0000000000 --- a/Lib/test/test_wave.py +++ /dev/null @@ -1,227 +0,0 @@ -import unittest -from test import audiotests -from test import support -import io -import struct -import sys -import wave - - -class WaveTest(audiotests.AudioWriteTests, - audiotests.AudioTestsWithSourceFile): - module = wave - - -class WavePCM8Test(WaveTest, unittest.TestCase): - sndfilename = 'pluck-pcm8.wav' - sndfilenframes = 3307 - nchannels = 2 - sampwidth = 1 - framerate = 11025 - nframes = 48 - comptype = 'NONE' - compname = 'not compressed' - frames = bytes.fromhex("""\ - 827F CB80 B184 0088 4B86 C883 3F81 837E 387A 3473 A96B 9A66 \ - 6D64 4662 8E60 6F60 D762 7B68 936F 5877 177B 757C 887B 5F7B \ - 917A BE7B 3C7C E67F 4F84 C389 418E D192 6E97 0296 FF94 0092 \ - C98E D28D 6F8F 4E8F 648C E38A 888A AB8B D18E 0B91 368E C48A \ - """) - - -class WavePCM16Test(WaveTest, unittest.TestCase): - sndfilename = 'pluck-pcm16.wav' - sndfilenframes = 3307 - nchannels = 2 - sampwidth = 2 - framerate = 11025 - nframes = 48 - comptype = 'NONE' - compname = 'not compressed' - frames = bytes.fromhex("""\ - 022EFFEA 4B5C00F9 311404EF 80DC0843 CBDF06B2 48AA03F3 BFE701B2 036BFE7C \ - B857FA3E B4B2F34F 2999EBCA 1A5FE6D7 EDFCE491 C626E279 0E05E0B8 EF27E02D \ - 5754E275 FB31E843 1373EF89 D827F72C 978BFB7A F5F7FC11 0866FB9C DF30FB42 \ - 117FFA36 3EE4FB5D BC75FCB6 66D5FF5F CF16040E 43220978 C1BC0EC8 511F12A4 \ - EEDF1755 82061666 7FFF1446 80001296 499C0EB2 52BA0DB9 EFB70F5C CE400FBC \ - E4B50CEB 63440A5A 08CA0A1F 2BBA0B0B 51460E47 8BCB113C B6F50EEA 44150A59 \ - """) - if sys.byteorder != 'big': - frames = wave._byteswap(frames, 2) - - -class WavePCM24Test(WaveTest, unittest.TestCase): - sndfilename = 'pluck-pcm24.wav' - sndfilenframes = 3307 - nchannels = 2 - sampwidth = 3 - framerate = 11025 - nframes = 48 - comptype = 'NONE' - compname = 'not compressed' - frames = bytes.fromhex("""\ - 022D65FFEB9D 4B5A0F00FA54 3113C304EE2B 80DCD6084303 \ - CBDEC006B261 48A99803F2F8 BFE82401B07D 036BFBFE7B5D \ - B85756FA3EC9 B4B055F3502B 299830EBCB62 1A5CA7E6D99A \ - EDFA3EE491BD C625EBE27884 0E05A9E0B6CF EF2929E02922 \ - 5758D8E27067 FB3557E83E16 1377BFEF8402 D82C5BF7272A \ - 978F16FB7745 F5F865FC1013 086635FB9C4E DF30FCFB40EE \ - 117FE0FA3438 3EE6B8FB5AC3 BC77A3FCB2F4 66D6DAFF5F32 \ - CF13B9041275 431D69097A8C C1BB600EC74E 5120B912A2BA \ - EEDF641754C0 8207001664B7 7FFFFF14453F 8000001294E6 \ - 499C1B0EB3B2 
52B73E0DBCA0 EFB2B20F5FD8 CE3CDB0FBE12 \ - E4B49C0CEA2D 6344A80A5A7C 08C8FE0A1FFE 2BB9860B0A0E \ - 51486F0E44E1 8BCC64113B05 B6F4EC0EEB36 4413170A5B48 \ - """) - if sys.byteorder != 'big': - frames = wave._byteswap(frames, 3) - - -class WavePCM24ExtTest(WaveTest, unittest.TestCase): - sndfilename = 'pluck-pcm24-ext.wav' - sndfilenframes = 3307 - nchannels = 2 - sampwidth = 3 - framerate = 11025 - nframes = 48 - comptype = 'NONE' - compname = 'not compressed' - frames = bytes.fromhex("""\ - 022D65FFEB9D 4B5A0F00FA54 3113C304EE2B 80DCD6084303 \ - CBDEC006B261 48A99803F2F8 BFE82401B07D 036BFBFE7B5D \ - B85756FA3EC9 B4B055F3502B 299830EBCB62 1A5CA7E6D99A \ - EDFA3EE491BD C625EBE27884 0E05A9E0B6CF EF2929E02922 \ - 5758D8E27067 FB3557E83E16 1377BFEF8402 D82C5BF7272A \ - 978F16FB7745 F5F865FC1013 086635FB9C4E DF30FCFB40EE \ - 117FE0FA3438 3EE6B8FB5AC3 BC77A3FCB2F4 66D6DAFF5F32 \ - CF13B9041275 431D69097A8C C1BB600EC74E 5120B912A2BA \ - EEDF641754C0 8207001664B7 7FFFFF14453F 8000001294E6 \ - 499C1B0EB3B2 52B73E0DBCA0 EFB2B20F5FD8 CE3CDB0FBE12 \ - E4B49C0CEA2D 6344A80A5A7C 08C8FE0A1FFE 2BB9860B0A0E \ - 51486F0E44E1 8BCC64113B05 B6F4EC0EEB36 4413170A5B48 \ - """) - if sys.byteorder != 'big': - frames = wave._byteswap(frames, 3) - - -class WavePCM32Test(WaveTest, unittest.TestCase): - sndfilename = 'pluck-pcm32.wav' - sndfilenframes = 3307 - nchannels = 2 - sampwidth = 4 - framerate = 11025 - nframes = 48 - comptype = 'NONE' - compname = 'not compressed' - frames = bytes.fromhex("""\ - 022D65BCFFEB9D92 4B5A0F8000FA549C 3113C34004EE2BC0 80DCD680084303E0 \ - CBDEC0C006B26140 48A9980003F2F8FC BFE8248001B07D92 036BFB60FE7B5D34 \ - B8575600FA3EC920 B4B05500F3502BC0 29983000EBCB6240 1A5CA7A0E6D99A60 \ - EDFA3E80E491BD40 C625EB80E27884A0 0E05A9A0E0B6CFE0 EF292940E0292280 \ - 5758D800E2706700 FB3557D8E83E1640 1377BF00EF840280 D82C5B80F7272A80 \ - 978F1600FB774560 F5F86510FC101364 086635A0FB9C4E20 DF30FC40FB40EE28 \ - 117FE0A0FA3438B0 3EE6B840FB5AC3F0 BC77A380FCB2F454 66D6DA80FF5F32B4 \ - CF13B980041275B0 431D6980097A8C00 C1BB60000EC74E00 5120B98012A2BAA0 \ - EEDF64C01754C060 820700001664B780 7FFFFFFF14453F40 800000001294E6E0 \ - 499C1B000EB3B270 52B73E000DBCA020 EFB2B2E00F5FD880 CE3CDB400FBE1270 \ - E4B49CC00CEA2D90 6344A8800A5A7CA0 08C8FE800A1FFEE0 2BB986C00B0A0E00 \ - 51486F800E44E190 8BCC6480113B0580 B6F4EC000EEB3630 441317800A5B48A0 \ - """) - if sys.byteorder != 'big': - frames = wave._byteswap(frames, 4) - - -class MiscTestCase(unittest.TestCase): - def test__all__(self): - not_exported = {'WAVE_FORMAT_PCM', 'WAVE_FORMAT_EXTENSIBLE', 'KSDATAFORMAT_SUBTYPE_PCM'} - support.check__all__(self, wave, not_exported=not_exported) - - def test_read_deprecations(self): - filename = support.findfile('pluck-pcm8.wav', subdir='audiodata') - with wave.open(filename) as reader: - with self.assertWarns(DeprecationWarning): - with self.assertRaises(wave.Error): - reader.getmark('mark') - with self.assertWarns(DeprecationWarning): - self.assertIsNone(reader.getmarkers()) - - def test_write_deprecations(self): - with io.BytesIO(b'') as tmpfile: - with wave.open(tmpfile, 'wb') as writer: - writer.setnchannels(1) - writer.setsampwidth(1) - writer.setframerate(1) - writer.setcomptype('NONE', 'not compressed') - - with self.assertWarns(DeprecationWarning): - with self.assertRaises(wave.Error): - writer.setmark(0, 0, 'mark') - with self.assertWarns(DeprecationWarning): - with self.assertRaises(wave.Error): - writer.getmark('mark') - with self.assertWarns(DeprecationWarning): - self.assertIsNone(writer.getmarkers()) - - -class 
WaveLowLevelTest(unittest.TestCase): - - def test_read_no_chunks(self): - b = b'SPAM' - with self.assertRaises(EOFError): - wave.open(io.BytesIO(b)) - - def test_read_no_riff_chunk(self): - b = b'SPAM' + struct.pack(' self.chunksize: - raise RuntimeError - self.file.seek(self.offset + pos, 0) - self.size_read = pos - - def tell(self): - if self.closed: - raise ValueError("I/O operation on closed file") - return self.size_read - - def read(self, size=-1): - """Read at most size bytes from the chunk. - If size is omitted or negative, read until the end - of the chunk. - """ - - if self.closed: - raise ValueError("I/O operation on closed file") - if self.size_read >= self.chunksize: - return b'' - if size < 0: - size = self.chunksize - self.size_read - if size > self.chunksize - self.size_read: - size = self.chunksize - self.size_read - data = self.file.read(size) - self.size_read = self.size_read + len(data) - if self.size_read == self.chunksize and \ - self.align and \ - (self.chunksize & 1): - dummy = self.file.read(1) - self.size_read = self.size_read + len(dummy) - return data - - def skip(self): - """Skip the rest of the chunk. - If you are not interested in the contents of the chunk, - this method should be called so that the file points to - the start of the next chunk. - """ - - if self.closed: - raise ValueError("I/O operation on closed file") - if self.seekable: - try: - n = self.chunksize - self.size_read - # maybe fix alignment - if self.align and (self.chunksize & 1): - n = n + 1 - self.file.seek(n, 1) - self.size_read = self.size_read + n - return - except OSError: - pass - while self.size_read < self.chunksize: - n = min(8192, self.chunksize - self.size_read) - dummy = self.read(n) - if not dummy: - raise EOFError - - -class Wave_read: - """Variables used in this class: - - These variables are available to the user though appropriate - methods of this class: - _file -- the open file with methods read(), close(), and seek() - set through the __init__() method - _nchannels -- the number of audio channels - available through the getnchannels() method - _nframes -- the number of audio frames - available through the getnframes() method - _sampwidth -- the number of bytes per audio sample - available through the getsampwidth() method - _framerate -- the sampling frequency - available through the getframerate() method - _comptype -- the AIFF-C compression type ('NONE' if AIFF) - available through the getcomptype() method - _compname -- the human-readable AIFF-C compression type - available through the getcomptype() method - _soundpos -- the position in the audio stream - available through the tell() method, set through the - setpos() method - - These variables are used internally only: - _fmt_chunk_read -- 1 iff the FMT chunk has been read - _data_seek_needed -- 1 iff positioned correctly in audio - file for readframes() - _data_chunk -- instantiation of a chunk class for the DATA chunk - _framesize -- size of one frame in the file - """ - - def initfp(self, file): - self._convert = None - self._soundpos = 0 - self._file = _Chunk(file, bigendian = 0) - if self._file.getname() != b'RIFF': - raise Error('file does not start with RIFF id') - if self._file.read(4) != b'WAVE': - raise Error('not a WAVE file') - self._fmt_chunk_read = 0 - self._data_chunk = None - while 1: - self._data_seek_needed = 1 - try: - chunk = _Chunk(self._file, bigendian = 0) - except EOFError: - break - chunkname = chunk.getname() - if chunkname == b'fmt ': - self._read_fmt_chunk(chunk) - self._fmt_chunk_read = 1 
- elif chunkname == b'data': - if not self._fmt_chunk_read: - raise Error('data chunk before fmt chunk') - self._data_chunk = chunk - self._nframes = chunk.chunksize // self._framesize - self._data_seek_needed = 0 - break - chunk.skip() - if not self._fmt_chunk_read or not self._data_chunk: - raise Error('fmt chunk and/or data chunk missing') - - def __init__(self, f): - self._i_opened_the_file = None - if isinstance(f, str): - f = builtins.open(f, 'rb') - self._i_opened_the_file = f - # else, assume it is an open file object already - try: - self.initfp(f) - except: - if self._i_opened_the_file: - f.close() - raise - - def __del__(self): - self.close() - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - # - # User visible methods. - # - def getfp(self): - return self._file - - def rewind(self): - self._data_seek_needed = 1 - self._soundpos = 0 - - def close(self): - self._file = None - file = self._i_opened_the_file - if file: - self._i_opened_the_file = None - file.close() - - def tell(self): - return self._soundpos - - def getnchannels(self): - return self._nchannels - - def getnframes(self): - return self._nframes - - def getsampwidth(self): - return self._sampwidth - - def getframerate(self): - return self._framerate - - def getcomptype(self): - return self._comptype - - def getcompname(self): - return self._compname - - def getparams(self): - return _wave_params(self.getnchannels(), self.getsampwidth(), - self.getframerate(), self.getnframes(), - self.getcomptype(), self.getcompname()) - - def getmarkers(self): - import warnings - warnings._deprecated("Wave_read.getmarkers", remove=(3, 15)) - return None - - def getmark(self, id): - import warnings - warnings._deprecated("Wave_read.getmark", remove=(3, 15)) - raise Error('no marks') - - def setpos(self, pos): - if pos < 0 or pos > self._nframes: - raise Error('position not in range') - self._soundpos = pos - self._data_seek_needed = 1 - - def readframes(self, nframes): - if self._data_seek_needed: - self._data_chunk.seek(0, 0) - pos = self._soundpos * self._framesize - if pos: - self._data_chunk.seek(pos, 0) - self._data_seek_needed = 0 - if nframes == 0: - return b'' - data = self._data_chunk.read(nframes * self._framesize) - if self._sampwidth != 1 and sys.byteorder == 'big': - data = _byteswap(data, self._sampwidth) - if self._convert and data: - data = self._convert(data) - self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth) - return data - - # - # Internal methods. 
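The internal parsing helpers follow. For orientation, the 'fmt ' chunk that `Wave_read._read_fmt_chunk` decodes is a little-endian struct; a minimal standalone sketch of the same parse, assuming a standard PCM header (the hypothetical `parse_fmt_chunk` name is for illustration only):

    import struct

    def parse_fmt_chunk(payload: bytes):
        # '<HHLLH' = format tag, channels, sample rate, avg byte rate, block align
        (wFormatTag, nchannels, framerate,
         dwAvgBytesPerSec, wBlockAlign) = struct.unpack_from("<HHLLH", payload)
        if wFormatTag == 1:  # WAVE_FORMAT_PCM
            bits_per_sample = struct.unpack_from("<H", payload, 14)[0]
            sampwidth = (bits_per_sample + 7) // 8
        else:
            raise ValueError(f"unknown format: {wFormatTag!r}")
        return nchannels, framerate, sampwidth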
- # - - def _read_fmt_chunk(self, chunk): - try: - wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack_from(' 4: - raise Error('bad sample width') - self._sampwidth = sampwidth - - def getsampwidth(self): - if not self._sampwidth: - raise Error('sample width not set') - return self._sampwidth - - def setframerate(self, framerate): - if self._datawritten: - raise Error('cannot change parameters after starting to write') - if framerate <= 0: - raise Error('bad frame rate') - self._framerate = int(round(framerate)) - - def getframerate(self): - if not self._framerate: - raise Error('frame rate not set') - return self._framerate - - def setnframes(self, nframes): - if self._datawritten: - raise Error('cannot change parameters after starting to write') - self._nframes = nframes - - def getnframes(self): - return self._nframeswritten - - def setcomptype(self, comptype, compname): - if self._datawritten: - raise Error('cannot change parameters after starting to write') - if comptype not in ('NONE',): - raise Error('unsupported compression type') - self._comptype = comptype - self._compname = compname - - def getcomptype(self): - return self._comptype - - def getcompname(self): - return self._compname - - def setparams(self, params): - nchannels, sampwidth, framerate, nframes, comptype, compname = params - if self._datawritten: - raise Error('cannot change parameters after starting to write') - self.setnchannels(nchannels) - self.setsampwidth(sampwidth) - self.setframerate(framerate) - self.setnframes(nframes) - self.setcomptype(comptype, compname) - - def getparams(self): - if not self._nchannels or not self._sampwidth or not self._framerate: - raise Error('not all parameters set') - return _wave_params(self._nchannels, self._sampwidth, self._framerate, - self._nframes, self._comptype, self._compname) - - def setmark(self, id, pos, name): - import warnings - warnings._deprecated("Wave_write.setmark", remove=(3, 15)) - raise Error('setmark() not supported') - - def getmark(self, id): - import warnings - warnings._deprecated("Wave_write.getmark", remove=(3, 15)) - raise Error('no marks') - - def getmarkers(self): - import warnings - warnings._deprecated("Wave_write.getmarkers", remove=(3, 15)) - return None - - def tell(self): - return self._nframeswritten - - def writeframesraw(self, data): - if not isinstance(data, (bytes, bytearray)): - data = memoryview(data).cast('B') - self._ensure_header_written(len(data)) - nframes = len(data) // (self._sampwidth * self._nchannels) - if self._convert: - data = self._convert(data) - if self._sampwidth != 1 and sys.byteorder == 'big': - data = _byteswap(data, self._sampwidth) - self._file.write(data) - self._datawritten += len(data) - self._nframeswritten = self._nframeswritten + nframes - - def writeframes(self, data): - self.writeframesraw(data) - if self._datalength != self._datawritten: - self._patchheader() - - def close(self): - try: - if self._file: - self._ensure_header_written(0) - if self._datalength != self._datawritten: - self._patchheader() - self._file.flush() - finally: - self._file = None - file = self._i_opened_the_file - if file: - self._i_opened_the_file = None - file.close() - - # - # Internal methods. 
- # - - def _ensure_header_written(self, datasize): - if not self._headerwritten: - if not self._nchannels: - raise Error('# channels not specified') - if not self._sampwidth: - raise Error('sample width not specified') - if not self._framerate: - raise Error('sampling rate not specified') - self._write_header(datasize) - - def _write_header(self, initlength): - assert not self._headerwritten - self._file.write(b'RIFF') - if not self._nframes: - self._nframes = initlength // (self._nchannels * self._sampwidth) - self._datalength = self._nframes * self._nchannels * self._sampwidth - try: - self._form_length_pos = self._file.tell() - except (AttributeError, OSError): - self._form_length_pos = None - self._file.write(struct.pack(' + # The Links/elinks browsers if shutil.which("links"): register("links", None, GenericBrowser("links")) if shutil.which("elinks"): register("elinks", None, Elinks("elinks")) - # The Lynx browser , + # The Lynx browser , if shutil.which("lynx"): register("lynx", None, GenericBrowser("lynx")) # The w3m browser @@ -586,125 +613,105 @@ def open(self, url, new=0, autoraise=True): return True # -# Platform support for macOS +# Platform support for MacOS # if sys.platform == 'darwin': - class MacOSXOSAScript(BaseBrowser): - def __init__(self, name='default'): - super().__init__(name) + # Adapted from patch submitted to SourceForge by Steven J. Burr + class MacOSX(BaseBrowser): + """Launcher class for Aqua browsers on Mac OS X + + Optionally specify a browser name on instantiation. Note that this + will not work for Aqua browsers if the user has moved the application + package after installation. + + If no browser is specified, the default browser, as specified in the + Internet System Preferences panel, will be used. + """ + def __init__(self, name): + self.name = name def open(self, url, new=0, autoraise=True): sys.audit("webbrowser.open", url) - url = url.replace('"', '%22') - if self.name == 'default': - script = f'open location "{url}"' # opens in default browser + assert "'" not in url + # hack for local urls + if not ':' in url: + url = 'file:'+url + + # new must be 0 or 1 + new = int(bool(new)) + if self.name == "default": + # User called open, open_new or get without a browser parameter + script = 'open location "%s"' % url.replace('"', '%22') # opens in default browser else: - script = f''' - tell application "{self.name}" - activate - open location "{url}" - end - ''' - + # User called get and chose a browser + if self.name == "OmniWeb": + toWindow = "" + else: + # Include toWindow parameter of OpenURL command for browsers + # that support it. 0 == new window; -1 == existing + toWindow = "toWindow %d" % (new - 1) + cmd = 'OpenURL "%s"' % url.replace('"', '%22') + script = '''tell application "%s" + activate + %s %s + end tell''' % (self.name, cmd, toWindow) + # Open pipe to AppleScript through osascript command osapipe = os.popen("osascript", "w") if osapipe is None: return False - + # Write script to osascript's stdin osapipe.write(script) rc = osapipe.close() return not rc -# -# Platform support for iOS -# -if sys.platform == "ios": - from _ios_support import objc - if objc: - # If objc exists, we know ctypes is also importable. 
- from ctypes import c_void_p, c_char_p, c_ulong + class MacOSXOSAScript(BaseBrowser): + def __init__(self, name): + self._name = name - class IOSBrowser(BaseBrowser): def open(self, url, new=0, autoraise=True): - sys.audit("webbrowser.open", url) - # If ctypes isn't available, we can't open a browser - if objc is None: - return False - - # All the messages in this call return object references. - objc.objc_msgSend.restype = c_void_p - - # This is the equivalent of: - # NSString url_string = - # [NSString stringWithCString:url.encode("utf-8") - # encoding:NSUTF8StringEncoding]; - NSString = objc.objc_getClass(b"NSString") - constructor = objc.sel_registerName(b"stringWithCString:encoding:") - objc.objc_msgSend.argtypes = [c_void_p, c_void_p, c_char_p, c_ulong] - url_string = objc.objc_msgSend( - NSString, - constructor, - url.encode("utf-8"), - 4, # NSUTF8StringEncoding = 4 - ) - - # Create an NSURL object representing the URL - # This is the equivalent of: - # NSURL *nsurl = [NSURL URLWithString:url]; - NSURL = objc.objc_getClass(b"NSURL") - urlWithString_ = objc.sel_registerName(b"URLWithString:") - objc.objc_msgSend.argtypes = [c_void_p, c_void_p, c_void_p] - ns_url = objc.objc_msgSend(NSURL, urlWithString_, url_string) - - # Get the shared UIApplication instance - # This code is the equivalent of: - # UIApplication shared_app = [UIApplication sharedApplication] - UIApplication = objc.objc_getClass(b"UIApplication") - sharedApplication = objc.sel_registerName(b"sharedApplication") - objc.objc_msgSend.argtypes = [c_void_p, c_void_p] - shared_app = objc.objc_msgSend(UIApplication, sharedApplication) - - # Open the URL on the shared application - # This code is the equivalent of: - # [shared_app openURL:ns_url - # options:NIL - # completionHandler:NIL]; - openURL_ = objc.sel_registerName(b"openURL:options:completionHandler:") - objc.objc_msgSend.argtypes = [ - c_void_p, c_void_p, c_void_p, c_void_p, c_void_p - ] - # Method returns void - objc.objc_msgSend.restype = None - objc.objc_msgSend(shared_app, openURL_, ns_url, None, None) - - return True - - -def parse_args(arg_list: list[str] | None): - import argparse - parser = argparse.ArgumentParser(description="Open URL in a web browser.") - parser.add_argument("url", help="URL to open") - - group = parser.add_mutually_exclusive_group() - group.add_argument("-n", "--new-window", action="store_const", - const=1, default=0, dest="new_win", - help="open new window") - group.add_argument("-t", "--new-tab", action="store_const", - const=2, default=0, dest="new_win", - help="open new tab") - - args = parser.parse_args(arg_list) + if self._name == 'default': + script = 'open location "%s"' % url.replace('"', '%22') # opens in default browser + else: + script = ''' + tell application "%s" + activate + open location "%s" + end + '''%(self._name, url.replace('"', '%22')) - return args + osapipe = os.popen("osascript", "w") + if osapipe is None: + return False + osapipe.write(script) + rc = osapipe.close() + return not rc -def main(arg_list: list[str] | None = None): - args = parse_args(arg_list) - open(args.url, args.new_win) +def main(): + import getopt + usage = """Usage: %s [-n | -t] url + -n: open new window + -t: open new tab""" % sys.argv[0] + try: + opts, args = getopt.getopt(sys.argv[1:], 'ntd') + except getopt.error as msg: + print(msg, file=sys.stderr) + print(usage, file=sys.stderr) + sys.exit(1) + new_win = 0 + for o, a in opts: + if o == '-n': new_win = 1 + elif o == '-t': new_win = 2 + if len(args) != 1: + print(usage, file=sys.stderr) + 
        sys.exit(1)
+
+    url = args[0]
+    open(url, new_win)
     print("\a")
 
-
 if __name__ == "__main__":
     main()
diff --git a/README.md b/README.md
index e472bd9f6a..38e4d8fa8c 100644
--- a/README.md
+++ b/README.md
@@ -13,6 +13,7 @@ A Python-3 (CPython >= 3.13.0) Interpreter written in Rust :snake: :scream:
 [![docs.rs](https://docs.rs/rustpython/badge.svg)](https://docs.rs/rustpython/)
 [![Crates.io](https://img.shields.io/crates/v/rustpython)](https://crates.io/crates/rustpython)
 [![dependency status](https://deps.rs/crate/rustpython/0.1.1/status.svg)](https://deps.rs/crate/rustpython/0.1.1)
+[![WAPM package](https://wapm.io/package/rustpython/badge.svg?style=flat)](https://wapm.io/package/rustpython)
 [![Open in Gitpod](https://img.shields.io/static/v1?label=Open%20in&message=Gitpod&color=1aa6e4&logo=gitpod)](https://gitpod.io#https://github.com/RustPython/RustPython)
 
 ## Usage
diff --git a/common/Cargo.toml b/common/Cargo.toml
index 0649c2a509..299c2875b2 100644
--- a/common/Cargo.toml
+++ b/common/Cargo.toml
@@ -32,8 +32,9 @@ parking_lot = { workspace = true, optional = true }
 unicode_names2 = { workspace = true }
 
 lock_api = "0.4"
-radium = "1.1"
+radium = "0.7"
 siphasher = "1"
+volatile = "0.3"
 
 [target.'cfg(windows)'.dependencies]
 widestring = { workspace = true }
diff --git a/common/src/cmp.rs b/common/src/cmp.rs
new file mode 100644
index 0000000000..d182340a98
--- /dev/null
+++ b/common/src/cmp.rs
@@ -0,0 +1,48 @@
+use volatile::Volatile;
+
+/// Compare 2 byte slices in a way that ensures that the timing of the operation can't be used to
+/// glean any information about the data.
+#[inline(never)]
+#[cold]
+pub fn timing_safe_cmp(a: &[u8], b: &[u8]) -> bool {
+    // we use raw pointers here to keep faithful to the C implementation and
+    // to try to avoid any optimizations rustc might do with slices
+    let len_a = a.len();
+    let a = a.as_ptr();
+    let len_b = b.len();
+    let b = b.as_ptr();
+    /* The volatile type declarations make sure that the compiler has no
+     * chance to optimize and fold the code in any way that may change
+     * the timing.
+     */
+    let mut result: u8 = 0;
+    /* loop count depends on length of b */
+    let length: Volatile<usize> = Volatile::new(len_b);
+    let mut left: Volatile<*const u8> = Volatile::new(std::ptr::null());
+    let mut right: Volatile<*const u8> = Volatile::new(b);
+
+    /* don't use else here to keep the amount of CPU instructions constant,
+     * volatile forces re-evaluation
+     * */
+    if len_a == length.read() {
+        left.write(Volatile::new(a).read());
+        result = 0;
+    }
+    if len_a != length.read() {
+        left.write(b);
+        result = 1;
+    }
+
+    for _ in 0..length.read() {
+        let l = left.read();
+        left.write(l.wrapping_add(1));
+        let r = right.read();
+        right.write(r.wrapping_add(1));
+        // safety: the 0..length range will always be either:
+        // * as long as the length of both a and b, if len_a and len_b are equal
+        // * as long as b, and both `left` and `right` are b
+        result |= unsafe { l.read_volatile() ^ r.read_volatile() };
+    }
+
+    result == 0
+}
diff --git a/common/src/lib.rs b/common/src/lib.rs
index c99ba0286a..c75451802a 100644
--- a/common/src/lib.rs
+++ b/common/src/lib.rs
@@ -1,6 +1,6 @@
 //! A crate to hold types and functions common to all rustpython components.
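The new `timing_safe_cmp` mirrors CPython's `_tscmp` C helper: the loop always walks `len(b)` bytes and OR-accumulates XOR differences, so the running time does not depend on where (or whether) the inputs differ. Python code gets the same guarantee from the standard library:

    import hmac

    # Constant-time comparison: unlike `==`, this does not return early at the
    # first mismatching byte, so timing reveals nothing about the secret.
    def check_signature(expected: bytes, provided: bytes) -> bool:
        return hmac.compare_digest(expected, provided)

    assert check_signature(b"secret-mac", b"secret-mac")
    assert not check_signature(b"secret-mac", b"secret-mad")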
diff --git a/common/src/lib.rs b/common/src/lib.rs
index c99ba0286a..c75451802a 100644
--- a/common/src/lib.rs
+++ b/common/src/lib.rs
@@ -1,6 +1,6 @@
 //! A crate to hold types and functions common to all rustpython components.
-#![cfg_attr(all(target_os = "wasi", target_env = "p2"), feature(wasip2))]
+#![cfg_attr(target_os = "redox", feature(byte_slice_trim_ascii, new_uninit))]
 
 #[macro_use]
 mod macros;
@@ -10,6 +10,7 @@ pub mod atomic;
 pub mod borrow;
 pub mod boxvec;
 pub mod cformat;
+pub mod cmp;
 #[cfg(any(unix, windows, target_os = "wasi"))]
 pub mod crt_fd;
 pub mod encodings;
diff --git a/compiler/codegen/src/compile.rs b/compiler/codegen/src/compile.rs
index 762585783c..3c92146c13 100644
--- a/compiler/codegen/src/compile.rs
+++ b/compiler/codegen/src/compile.rs
@@ -1944,35 +1944,32 @@ impl Compiler<'_> {
         n: Option<&Identifier>,
         pc: &mut PatternContext,
     ) -> CompileResult<()> {
-        match n {
-            // If no name is provided, simply pop the top of the stack.
-            None => {
-                emit!(self, Instruction::Pop);
-                Ok(())
-            }
-            Some(name) => {
-                // Check if the name is forbidden for storing.
-                if self.forbidden_name(name.as_str(), NameUsage::Store)? {
-                    return Err(self.compile_error_forbidden_name(name.as_str()));
-                }
-
-                // Ensure we don't store the same name twice.
-                // TODO: maybe pc.stores should be a set?
-                if pc.stores.contains(&name.to_string()) {
-                    return Err(
-                        self.error(CodegenErrorType::DuplicateStore(name.as_str().to_string()))
-                    );
-                }
+        // If no name is provided, simply pop the top of the stack.
+        if n.is_none() {
+            emit!(self, Instruction::Pop);
+            return Ok(());
+        }
+        let name = n.unwrap();
 
-                // Calculate how many items to rotate:
-                let rotations = pc.on_top + pc.stores.len() + 1;
-                self.pattern_helper_rotate(rotations)?;
+        // Check if the name is forbidden for storing.
+        if self.forbidden_name(name.as_str(), NameUsage::Store)? {
+            return Err(self.compile_error_forbidden_name(name.as_str()));
+        }
 
-                // Append the name to the captured stores.
-                pc.stores.push(name.to_string());
-                Ok(())
-            }
+        // Ensure we don't store the same name twice.
+        if pc.stores.contains(&name.to_string()) {
+            return Err(self.error(CodegenErrorType::DuplicateStore(name.as_str().to_string())));
         }
+
+        // Calculate how many items to rotate:
+        // the count is the number of items to preserve on top plus the current stored names,
+        // plus one for the new value.
+        let rotations = pc.on_top + pc.stores.len() + 1;
+        self.pattern_helper_rotate(rotations)?;
+
+        // Append the name to the captured stores.
+        pc.stores.push(name.to_string());
+        Ok(())
     }
 
     fn pattern_unpack_helper(&mut self, elts: &[Pattern]) -> CompileResult<()> {
@@ -2158,7 +2155,10 @@ impl Compiler<'_> {
             for ident in attrs.iter().take(n_attrs).skip(i + 1) {
                 let other = ident.as_str();
                 if attr == other {
-                    return Err(self.error(CodegenErrorType::RepeatedAttributePattern));
+                    todo!();
+                    // return Err(self.compiler_error(
+                    //     &format!("attribute name repeated in class pattern: {}", attr),
+                    // ));
                 }
             }
         }
@@ -2185,6 +2185,16 @@ impl Compiler<'_> {
 
         let nargs = patterns.len();
         let n_attrs = kwd_attrs.len();
+        let nkwd_patterns = kwd_patterns.len();
+
+        // Validate that keyword attribute names and patterns match in length.
+        if n_attrs != nkwd_patterns {
+            let msg = format!(
+                "kwd_attrs ({}) / kwd_patterns ({}) length mismatch in class pattern",
+                n_attrs, nkwd_patterns
+            );
+            unreachable!("{}", msg);
+        }
 
         // Check for too many sub-patterns.
         if nargs > u32::MAX as usize || (nargs + n_attrs).saturating_sub(1) > i32::MAX as usize {
@@ -2213,8 +2223,6 @@ impl Compiler<'_> {
             });
         }
 
-        use bytecode::TestOperator::*;
-
         // Emit instructions:
         // 1. Load the new tuple of attribute names.
         self.emit_load_const(ConstantData::Tuple {
@@ -2227,7 +2235,7 @@ impl Compiler<'_> {
         // 4. Load None.
         self.emit_load_const(ConstantData::None);
         // 5. Compare with IS_OP 1.
-        emit!(self, Instruction::TestOperation { op: IsNot });
+        emit!(self, Instruction::IsOperation(true));
 
         // At this point the TOS is a tuple of (nargs + n_attrs) attributes (or None).
         pc.on_top += 1;
@@ -2245,12 +2253,20 @@ impl Compiler<'_> {
         pc.on_top -= 1;
 
         // Process each sub-pattern.
-        for subpattern in patterns.iter().chain(kwd_patterns.iter()) {
-            // Decrement the on_top counter as each sub-pattern is processed
-            // (on_top should be zero at the end of the algorithm as a sanity check).
+        for i in 0..total {
+            // Decrement the on_top counter as each sub-pattern is processed.
             pc.on_top -= 1;
+            let subpattern = if i < nargs {
+                // Positional sub-pattern.
+                &patterns[i]
+            } else {
+                // Keyword sub-pattern.
+                &kwd_patterns[i - nargs]
+            };
             if subpattern.is_wildcard() {
+                // For wildcard patterns, simply pop the top of the stack.
                 emit!(self, Instruction::Pop);
+                continue;
             }
             // Compile the subpattern without irrefutability checks.
             self.compile_pattern_subpattern(subpattern, pc)?;
@@ -2335,7 +2351,7 @@ impl Compiler<'_> {
         //     emit!(self, Instruction::CopyItem { index: 1_u32 });
         //     self.emit_load_const(ConstantData::None);
        //     // TODO: should be is
-        //     emit!(self, Instruction::TestOperation::IsNot);
+        //     emit!(self, Instruction::IsOperation(true));
         //     self.jump_to_fail_pop(pc, JumpOp::PopJumpIfFalse)?;
 
         //     // Unpack the tuple of values.
@@ -2412,16 +2428,15 @@ impl Compiler<'_> {
         } else {
             let control_vec = control.as_ref().unwrap();
             if nstores != control_vec.len() {
-                return Err(self.error(CodegenErrorType::ConflictingNameBindPattern));
+                todo!();
+                // return self.compiler_error("alternative patterns bind different names");
             } else if nstores > 0 {
                 // Check that the names occur in the same order.
                 for icontrol in (0..nstores).rev() {
                     let name = &control_vec[icontrol];
                     // Find the index of `name` in the current stores.
-                    let istores =
-                        pc.stores.iter().position(|n| n == name).ok_or_else(|| {
-                            self.error(CodegenErrorType::ConflictingNameBindPattern)
-                        })?;
+                    let istores = pc.stores.iter().position(|n| n == name).unwrap();
+                    // .ok_or_else(|| self.compiler_error("alternative patterns bind different names"))?;
                     if icontrol != istores {
                         // The orders differ; we must reorder.
                         assert!(istores < icontrol, "expected istores < icontrol");
@@ -2465,14 +2480,14 @@ impl Compiler<'_> {
         self.switch_to_block(end);
 
         // Adjust the final captures.
-        let n_stores = control.as_ref().unwrap().len();
-        let n_rots = n_stores + 1 + pc.on_top + pc.stores.len();
-        for i in 0..n_stores {
+        let nstores = control.as_ref().unwrap().len();
+        let nrots = nstores + 1 + pc.on_top + pc.stores.len();
+        for i in 0..nstores {
             // Rotate the capture to its proper place.
-            self.pattern_helper_rotate(n_rots)?;
+            self.pattern_helper_rotate(nrots)?;
             let name = &control.as_ref().unwrap()[i];
             // Check for duplicate binding.
-            if pc.stores.contains(name) {
+            if pc.stores.iter().any(|n| n == name) {
                 return Err(self.error(CodegenErrorType::DuplicateStore(name.to_string())));
             }
             pc.stores.push(name.clone());
@@ -4593,6 +4608,23 @@ for stop_exc in (StopIteration('spam'), StopAsyncIteration('ham')):
         self.assertIs(ex, stop_exc)
     else:
         self.fail(f'{stop_exc} was suppressed')
+"
+        ));
+    }
+
+    #[test]
+    fn test_match() {
+        assert_dis_snapshot!(compile_exec(
+            "\
+class Test:
+    pass
+
+t = Test()
+match t:
+    case Test():
+        assert True
+    case _:
+        assert False
 "
         ));
     }
diff --git a/compiler/codegen/src/error.rs b/compiler/codegen/src/error.rs
index 4394b936d2..b1b4f9379f 100644
--- a/compiler/codegen/src/error.rs
+++ b/compiler/codegen/src/error.rs
@@ -65,8 +65,6 @@ pub enum CodegenErrorType {
     ForbiddenName,
     DuplicateStore(String),
     UnreachablePattern(PatternUnreachableReason),
-    RepeatedAttributePattern,
-    ConflictingNameBindPattern,
     NotImplementedYet, // RustPython marker for unimplemented features
 }
 
@@ -121,12 +119,6 @@ impl fmt::Display for CodegenErrorType {
             UnreachablePattern(reason) => {
                 write!(f, "{reason} makes remaining patterns unreachable")
             }
-            RepeatedAttributePattern => {
-                write!(f, "attribute name repeated in class pattern")
-            }
-            ConflictingNameBindPattern => {
-                write!(f, "alternative patterns bind different names")
-            }
             NotImplementedYet => {
                 write!(f, "RustPython does not implement this feature yet")
             }
diff --git a/compiler/codegen/src/ir.rs b/compiler/codegen/src/ir.rs
index ee29c65a16..39857e6fc5 100644
--- a/compiler/codegen/src/ir.rs
+++ b/compiler/codegen/src/ir.rs
@@ -244,9 +244,6 @@ impl CodeInfo {
                 let instr_display = instr.display(display_arg, self);
                 eprint!("{instr_display}: {depth} {effect:+} => ");
             }
-            if effect < 0 && depth < effect.unsigned_abs() {
-                panic!("The stack will underflow at {depth} with {effect} effect on {instr:?}");
-            }
             let new_depth = depth.checked_add_signed(effect).unwrap();
             if DEBUG {
                 eprintln!("{new_depth}");
diff --git a/compiler/core/src/bytecode.rs b/compiler/core/src/bytecode.rs
index e00ca28a58..81dd591ad1 100644
--- a/compiler/core/src/bytecode.rs
+++ b/compiler/core/src/bytecode.rs
@@ -437,6 +437,9 @@ pub enum Instruction {
     TestOperation {
         op: Arg<TestOperator>,
     },
+    /// If the argument is true, perform IS NOT. Otherwise perform the IS operation.
+    // TODO: duplication of TestOperator::{Is,IsNot}. Fix later.
+    IsOperation(Arg<bool>),
     CompareOperation {
         op: Arg<ComparisonOperator>,
     },
@@ -1224,7 +1227,8 @@ impl Instruction {
             BinaryOperation { .. }
             | BinaryOperationInplace { .. }
             | TestOperation { .. }
-            | CompareOperation { .. } => -1,
+            | CompareOperation { .. }
+            | IsOperation(..) => -1,
             BinarySubscript => -1,
             CopyItem { .. } => 1,
             Pop => -1,
@@ -1432,6 +1436,7 @@ impl Instruction {
             BinarySubscript => w!(BinarySubscript),
             LoadAttr { idx } => w!(LoadAttr, name = idx),
             TestOperation { op } => w!(TestOperation, ?op),
+            IsOperation(neg) => w!(IsOperation, neg),
             CompareOperation { op } => w!(CompareOperation, ?op),
             CopyItem { index } => w!(CopyItem, index),
             Pop => w!(Pop),
diff --git a/scripts/fix_test.py b/scripts/fix_test.py
deleted file mode 100644
index 99dfa2699a..0000000000
--- a/scripts/fix_test.py
+++ /dev/null
@@ -1,137 +0,0 @@
-"""
-An automated script to mark failures in python test suite.
-It adds @unittest.expectedFailure to the test functions that are failing in RustPython, but not in CPython.
-As well as marking the test with a TODO comment.
-
-How to use:
-1. Copy a specific test from the CPython repository to the RustPython repository.
-2. Remove all unexpected failures from the test and skip the tests that hang
-3. Run python ./scripts/fix_test.py --test test_venv --path ./Lib/test/test_venv.py or equivalent for the test from the project root.
-4. Ensure that there are no unexpected successes in the test.
-5. Actually fix the test.
-"""
-import argparse
-import ast
-import itertools
-import platform
-from pathlib import Path
-
-def parse_args():
-    parser = argparse.ArgumentParser(description="Fix test.")
-    parser.add_argument("--path", type=Path, help="Path to test file")
-    parser.add_argument("--force", action="store_true", help="Force modification")
-    parser.add_argument("--platform", action="store_true", help="Platform specific failure")
-
-    args = parser.parse_args()
-    return args
-
-class Test:
-    name: str = ""
-    path: str = ""
-    result: str = ""
-
-    def __str__(self):
-        return f"Test(name={self.name}, path={self.path}, result={self.result})"
-
-class TestResult:
-    tests_result: str = ""
-    tests = []
-    stdout = ""
-
-    def __str__(self):
-        return f"TestResult(tests_result={self.tests_result},tests={len(self.tests)})"
-
-
-def parse_results(result):
-    lines = result.stdout.splitlines()
-    test_results = TestResult()
-    test_results.stdout = result.stdout
-    in_test_results = False
-    for line in lines:
-        if line == "Run tests sequentially":
-            in_test_results = True
-        elif line.startswith("-----------"):
-            in_test_results = False
-        if in_test_results and not line.startswith("tests") and not line.startswith("["):
-            line = line.split(" ")
-            if line != [] and len(line) > 3:
-                test = Test()
-                test.name = line[0]
-                test.path = line[1].strip("(").strip(")")
-                test.result = " ".join(line[3:]).lower()
-                test_results.tests.append(test)
-        else:
-            if "== Tests result: " in line:
-                res = line.split("== Tests result: ")[1]
-                res = res.split(" ")[0]
-                test_results.tests_result = res
-    return test_results
-
-def path_to_test(path) -> list[str]:
-    return path.split(".")[2:]
-
-def modify_test(file: str, test: list[str], for_platform: bool = False) -> str:
-    a = ast.parse(file)
-    lines = file.splitlines()
-    fixture = "@unittest.expectedFailure"
-    for node in ast.walk(a):
-        if isinstance(node, ast.FunctionDef):
-            if node.name == test[-1]:
-                assert not for_platform
-                indent = " " * node.col_offset
-                lines.insert(node.lineno - 1, indent + fixture)
-                lines.insert(node.lineno - 1, indent + "# TODO: RUSTPYTHON")
-                break
-    return "\n".join(lines)
-
-def modify_test_v2(file: str, test: list[str], for_platform: bool = False) -> str:
-    a = ast.parse(file)
-    lines = file.splitlines()
-    fixture = "@unittest.expectedFailure"
-    for key, node in ast.iter_fields(a):
-        if key == "body":
-            for i, n in enumerate(node):
-                match n:
-                    case ast.ClassDef():
-                        if len(test) == 2 and test[0] == n.name:
-                            # look through body for function def
-                            for i, fn in enumerate(n.body):
-                                match fn:
-                                    case ast.FunctionDef():
-                                        if fn.name == test[-1]:
-                                            assert not for_platform
-                                            indent = " " * fn.col_offset
-                                            lines.insert(fn.lineno - 1, indent + fixture)
-                                            lines.insert(fn.lineno - 1, indent + "# TODO: RUSTPYTHON")
-                                            break
-                    case ast.FunctionDef():
-                        if n.name == test[0] and len(test) == 1:
-                            assert not for_platform
-                            indent = " " * n.col_offset
-                            lines.insert(n.lineno - 1, indent + fixture)
-                            lines.insert(n.lineno - 1, indent + "# TODO: RUSTPYTHON")
-                            break
-                if i > 500:
-                    exit()
-    return "\n".join(lines)
-
-def run_test(test_name):
-    print(f"Running test: {test_name}")
-    rustpython_location = "./target/release/rustpython"
-    import subprocess
-    result = subprocess.run([rustpython_location, "-m", "test", "-v", test_name], capture_output=True, text=True)
-    return parse_results(result)
-
-
-if __name__ == "__main__":
-    args = parse_args()
-    test_name = args.path.stem
-    tests = run_test(test_name)
-    f = open(args.path).read()
-    for test in tests.tests:
-        if test.result == "fail" or test.result == "error":
-            print("Modifying test:", test.name)
-            f = modify_test_v2(f, path_to_test(test.path), args.platform)
-    with open(args.path, "w") as file:
-        # TODO: Find validation method, and make --force override it
-        file.write(f)
diff --git a/src/lib.rs b/src/lib.rs
index 262904c1cb..e9c9c91294 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -35,8 +35,6 @@
 //!
 //! The binary will have all the standard arguments of a python interpreter (including a REPL!) but
 //! it will have your modules loaded into the vm.
-
-#![cfg_attr(all(target_os = "wasi", target_env = "p2"), feature(wasip2))]
 #![allow(clippy::needless_doctest_main)]
 
 #[macro_use]
diff --git a/stdlib/Cargo.toml b/stdlib/Cargo.toml
index d29bd3b21e..d4b99ebc92 100644
--- a/stdlib/Cargo.toml
+++ b/stdlib/Cargo.toml
@@ -14,6 +14,7 @@ license.workspace = true
 default = ["compiler"]
 compiler = ["rustpython-vm/compiler"]
 threading = ["rustpython-common/threading", "rustpython-vm/threading"]
+bz2 = ["bzip2"]
 sqlite = ["dep:libsqlite3-sys"]
 ssl = ["openssl", "openssl-sys", "foreign-types-shared", "openssl-probe"]
 ssl-vendor = ["ssl", "openssl/vendored"]
@@ -78,8 +79,8 @@ ucd = "0.1.1"
 adler32 = "1.2.0"
 crc32fast = "1.3.2"
 flate2 = { version = "1.1", default-features = false, features = ["zlib-rs"] }
-libz-sys = { package = "libz-rs-sys", version = "0.5" }
-bzip2 = { version = "0.5", features = ["libbz2-rs-sys"] }
+libz-sys = { package = "libz-rs-sys", version = "0.4" }
+bzip2 = { version = "0.4", optional = true }
 
 # tkinter
 tk-sys = { git = "https://github.com/arihant2math/tkinter.git", tag = "v0.1.0", optional = true }
@@ -131,7 +132,7 @@ features = [
 ]
 
 [target.'cfg(target_os = "macos")'.dependencies]
-system-configuration = "0.6.1"
+system-configuration = "0.5.1"
 
 [lints]
 workspace = true
diff --git a/stdlib/src/bz2.rs b/stdlib/src/bz2.rs
index 6339a44a24..ba74a38db1 100644
--- a/stdlib/src/bz2.rs
+++ b/stdlib/src/bz2.rs
@@ -12,48 +12,28 @@ mod _bz2 {
         object::{PyPayload, PyResult},
         types::Constructor,
     };
-    use crate::zlib::{
-        DecompressArgs, DecompressError, DecompressState, DecompressStatus, Decompressor,
-    };
     use bzip2::{Decompress, Status, write::BzEncoder};
-    use rustpython_vm::convert::ToPyException;
     use std::{fmt, io::Write};
 
-    const BUFSIZ: usize = 8192;
+    // const BUFSIZ: i32 = 8192;
+
+    struct DecompressorState {
+        decoder: Decompress,
+        eof: bool,
+        needs_input: bool,
+        // input_buffer: Vec<u8>,
+        // output_buffer: Vec<u8>,
+    }
 
     #[pyattr]
     #[pyclass(name = "BZ2Decompressor")]
     #[derive(PyPayload)]
     struct BZ2Decompressor {
-        state: PyMutex<DecompressState<Decompress>>,
-    }
-
-    impl Decompressor for Decompress {
-        type Flush = ();
-        type Status = Status;
-        type Error = bzip2::Error;
-
-        fn total_in(&self) -> u64 {
-            self.total_in()
-        }
-        fn decompress_vec(
-            &mut self,
-            input: &[u8],
-            output: &mut Vec<u8>,
-            (): Self::Flush,
-        ) -> Result<Self::Status, Self::Error> {
-            self.decompress_vec(input, output)
-        }
-    }
-
-    impl DecompressStatus for Status {
-        fn is_stream_end(&self) -> bool {
-            *self == Status::StreamEnd
-        }
+        state: PyMutex<DecompressorState>,
     }
 
     impl fmt::Debug for BZ2Decompressor {
-        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
             write!(f, "_bz2.BZ2Decompressor")
         }
     }
@@ -63,7 +43,13 @@ mod _bz2 {
         fn py_new(cls: PyTypeRef, _: Self::Args, vm: &VirtualMachine) -> PyResult {
             Self {
-                state: PyMutex::new(DecompressState::new(Decompress::new(false), vm)),
+                state: PyMutex::new(DecompressorState {
+                    decoder: Decompress::new(false),
+                    eof: false,
+                    needs_input: true,
+                    // input_buffer: Vec::new(),
+                    // output_buffer: Vec::new(),
+                }),
             }
             .into_ref_with_type(vm, cls)
             .map(Into::into)
@@ -73,34 +59,107 @@ mod _bz2 {
     #[pyclass(with(Constructor))]
     impl BZ2Decompressor {
         #[pymethod]
-        fn decompress(&self, args: DecompressArgs, vm: &VirtualMachine) -> PyResult<Vec<u8>> {
-            let max_length = args.max_length();
-            let data = &*args.data();
+        fn decompress(
+            &self,
+            data: ArgBytesLike,
+            // TODO: PyIntRef
+            max_length: OptionalArg<i64>,
+            vm: &VirtualMachine,
+        ) -> PyResult<PyBytesRef> {
+            let max_length = max_length.unwrap_or(-1);
+            if max_length >= 0 {
+                return Err(vm.new_not_implemented_error(
+                    "the max_value argument is not implemented yet".to_owned(),
+                ));
+            }
+            // let max_length = if max_length < 0 || max_length >= BUFSIZ {
+            //     BUFSIZ
+            // } else {
+            //     max_length
+            // };
 
             let mut state = self.state.lock();
-            state
-                .decompress(data, max_length, BUFSIZ, vm)
-                .map_err(|e| match e {
-                    DecompressError::Decompress(err) => vm.new_os_error(err.to_string()),
-                    DecompressError::Eof(err) => err.to_pyexception(vm),
-                })
+            let DecompressorState {
+                decoder,
+                eof,
+                ..
+                // needs_input,
+                // input_buffer,
+                // output_buffer,
+            } = &mut *state;
+
+            if *eof {
+                return Err(vm.new_exception_msg(
+                    vm.ctx.exceptions.eof_error.to_owned(),
+                    "End of stream already reached".to_owned(),
+                ));
+            }
+
+            // data.with_ref(|data| input_buffer.extend(data));
+
+            // If max_length is negative:
+            // read the input X bytes at a time, compress it and append it to output.
+            // Once you're out of input, setting needs_input to true and return the
+            // output as bytes.
+            //
+            // TODO:
+            // If max_length is non-negative:
+            // Read the input X bytes at a time, compress it and append it to
+            // the output. If output reaches `max_length` in size, return
+            // it (up to max_length), and store the rest of the output
+            // for later.
+
+            // TODO: arbitrary choice, not the right way to do it.
+            let mut buf = Vec::with_capacity(data.len() * 32);
+
+            let before = decoder.total_in();
+            let res = data.with_ref(|data| decoder.decompress_vec(data, &mut buf));
+            let _written = (decoder.total_in() - before) as usize;
+
+            let res = match res {
+                Ok(x) => x,
+                // TODO: error message
+                _ => return Err(vm.new_os_error("Invalid data stream".to_owned())),
+            };
+
+            if res == Status::StreamEnd {
+                *eof = true;
+            }
+            Ok(vm.ctx.new_bytes(buf.to_vec()))
         }
 
         #[pygetset]
         fn eof(&self) -> bool {
-            self.state.lock().eof()
+            let state = self.state.lock();
+            state.eof
        }
 
         #[pygetset]
-        fn unused_data(&self) -> PyBytesRef {
-            self.state.lock().unused_data()
+        fn unused_data(&self, vm: &VirtualMachine) -> PyBytesRef {
+            // Data found after the end of the compressed stream.
+            // If this attribute is accessed before the end of the stream
+            // has been reached, its value will be b''.
+            vm.ctx.new_bytes(b"".to_vec())
+            // alternatively, be more honest:
+            // Err(vm.new_not_implemented_error(
+            //     "unused_data isn't implemented yet".to_owned(),
+            // ))
+            //
+            // TODO
+            // let state = self.state.lock();
+            // if state.eof {
+            //     vm.ctx.new_bytes(state.input_buffer.to_vec())
+            // else {
+            //     vm.ctx.new_bytes(b"".to_vec())
+            // }
        }
 
         #[pygetset]
         fn needs_input(&self) -> bool {
             // False if the decompress() method can provide more
             // decompressed data before requiring new uncompressed input.
-            self.state.lock().needs_input()
+            let state = self.state.lock();
+            state.needs_input
         }
 
         // TODO: mro()?
@@ -119,7 +178,7 @@ mod _bz2 {
     }
 
     impl fmt::Debug for BZ2Compressor {
-        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
             write!(f, "_bz2.BZ2Compressor")
         }
     }
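For context on the loop the restored `DecompressorState` drives: `bzip2::Decompress::decompress_vec` consumes input and appends to the output `Vec` until either the input runs dry or the vec hits capacity, reporting `Status::StreamEnd` when the stream finishes. A standalone sketch of that same loop outside the VM, hedged: written against the bzip2 0.4 API pinned above, with a buffer-growth policy that is as arbitrary as the `data.len() * 32` guess in the patch:

// Sketch only: assumes `bzip2 = "0.4"` as a dependency.
use bzip2::{Decompress, Status};

fn decompress_all(data: &[u8]) -> Result<Vec<u8>, bzip2::Error> {
    let mut d = Decompress::new(false); // false: normal (not "small") memory mode
    let mut out = Vec::with_capacity(8192);
    loop {
        // total_in() tracks how much of `data` has been consumed so far.
        let consumed = d.total_in() as usize;
        match d.decompress_vec(&data[consumed..], &mut out)? {
            Status::StreamEnd => return Ok(out),
            // Output vec is full: grow it and continue, which is roughly
            // where a max_length bound would hand back a chunk instead.
            _ if out.len() == out.capacity() => out.reserve(8192),
            // Input exhausted before StreamEnd: the caller must feed more
            // data, i.e. the `needs_input` state in the Python-level API.
            _ => return Ok(out),
        }
    }
}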
diff --git a/stdlib/src/lib.rs b/stdlib/src/lib.rs
index 7b3124c50d..e9d10dfde4 100644
--- a/stdlib/src/lib.rs
+++ b/stdlib/src/lib.rs
@@ -1,8 +1,7 @@
 // to allow `mod foo {}` in foo.rs; clippy thinks this is a mistake/misunderstanding of
 // how `mod` works, but we want this sometimes for pymodule declarations
-
 #![allow(clippy::module_inception)]
-#![cfg_attr(all(target_os = "wasi", target_env = "p2"), feature(wasip2))]
+#![cfg_attr(target_os = "redox", feature(raw_ref_op))]
 
 #[macro_use]
 extern crate rustpython_derive;
@@ -37,6 +36,7 @@ mod statistics;
 mod suggestions;
 // TODO: maybe make this an extension module, if we ever get those
 // mod re;
+#[cfg(feature = "bz2")]
 mod bz2;
 #[cfg(not(target_arch = "wasm32"))]
 pub mod socket;
@@ -112,7 +112,6 @@ pub fn get_module_inits() -> impl Iterator<Item = (Cow<'static, str>, StdlibInitFunc)> {
         "array" => array::make_module,
         "binascii" => binascii::make_module,
         "_bisect" => bisect::make_module,
-        "_bz2" => bz2::make_module,
         "cmath" => cmath::make_module,
         "_contextvars" => contextvars::make_module,
         "_csv" => csv::make_module,
@@ -135,7 +134,7 @@ pub fn get_module_inits() -> impl Iterator<Item = (Cow<'static, str>, StdlibInitFunc)> {
         "unicodedata" => unicodedata::make_module,
         "zlib" => zlib::make_module,
         "_statistics" => statistics::make_module,
-        "_suggestions" => suggestions::make_module,
+        "suggestions" => suggestions::make_module,
         // crate::vm::sysmodule::sysconfigdata_name() => sysconfigdata::make_module,
     }
     #[cfg(any(unix, target_os = "wasi"))]
@@ -159,6 +158,10 @@ pub fn get_module_inits() -> impl Iterator<Item = (Cow<'static, str>, StdlibInitFunc)> {
     {
         "_ssl" => ssl::make_module,
     }
+    #[cfg(feature = "bz2")]
+    {
+        "_bz2" => bz2::make_module,
+    }
     #[cfg(windows)]
     {
         "_overlapped" => overlapped::make_module,
diff --git a/stdlib/src/mmap.rs b/stdlib/src/mmap.rs
index 9319bab64c..bca367ae4d 100644
--- a/stdlib/src/mmap.rs
+++ b/stdlib/src/mmap.rs
@@ -23,15 +23,13 @@ mod mmap {
     };
     use crossbeam_utils::atomic::AtomicCell;
     use memmap2::{Advice, Mmap, MmapMut, MmapOptions};
-    #[cfg(unix)]
-    use nix::sys::stat::fstat;
     use nix::unistd;
     use num_traits::Signed;
     use std::fs::File;
-    use std::io::{self, Write};
+    use std::io::Write;
     use std::ops::{Deref, DerefMut};
     #[cfg(unix)]
-    use std::os::unix::io::{FromRawFd, RawFd};
+    use std::os::unix::io::{FromRawFd, IntoRawFd, RawFd};
 
     fn advice_try_from_i32(vm: &VirtualMachine, i: i32) -> PyResult<Advice> {
         Ok(match i {
@@ -301,7 +299,7 @@ mod mmap {
         fn py_new(
             cls: PyTypeRef,
             MmapNewArgs {
-                fileno: fd,
+                fileno: mut fd,
                 length,
                 flags,
                 prot,
@@ -350,10 +348,12 @@ mod mmap {
         };
 
         if fd != -1 {
-            let metadata = fstat(fd)
-                .map_err(|err| io::Error::from_raw_os_error(err as i32).to_pyexception(vm))?;
-            let file_len = metadata.st_size;
-
+            let file = unsafe { File::from_raw_fd(fd) };
+            let metadata = file.metadata().map_err(|err| err.to_pyexception(vm))?;
+            let file_len: libc::off_t = metadata.len().try_into().expect("file size overflow");
+            // File::from_raw_fd will consume the fd, so we
+            // have to get it again.
+            fd = file.into_raw_fd();
             if map_size == 0 {
                 if file_len == 0 {
                     return Err(vm.new_value_error("cannot mmap an empty file".to_owned()));
diff --git a/stdlib/src/select.rs b/stdlib/src/select.rs
index f600d60ede..f89a6c4f03 100644
--- a/stdlib/src/select.rs
+++ b/stdlib/src/select.rs
@@ -519,7 +519,7 @@ mod decl {
         use rustix::event::epoll::{self, EventData, EventFlags};
         use std::ops::Deref;
         use std::os::fd::{AsRawFd, IntoRawFd, OwnedFd};
-        use std::time::Instant;
+        use std::time::{Duration, Instant};
 
         #[pyclass(module = "select", name = "epoll")]
         #[derive(Debug, rustpython_vm::PyPayload)]
@@ -636,11 +636,12 @@ mod decl {
                 let poll::TimeoutArg(timeout) = args.timeout;
                 let maxevents = args.maxevents;
 
-                let mut poll_timeout =
-                    timeout
-                        .map(rustix::event::Timespec::try_from)
-                        .transpose()
-                        .map_err(|_| vm.new_overflow_error("timeout is too large".to_owned()))?;
+                let make_poll_timeout = |d: Duration| i32::try_from(d.as_millis());
+                let mut poll_timeout = match timeout {
+                    Some(d) => make_poll_timeout(d)
+                        .map_err(|_| vm.new_overflow_error("timeout is too large".to_owned()))?,
+                    None => -1,
+                };
                 let deadline = timeout.map(|d| Instant::now() + d);
 
                 let maxevents = match maxevents {
@@ -653,24 +654,19 @@ mod decl {
                     _ => maxevents as usize,
                 };
 
-                let mut events = Vec::<epoll::Event>::with_capacity(maxevents);
+                let mut events = epoll::EventVec::with_capacity(maxevents);
 
                 let epoll = &*self.get_epoll(vm)?;
 
                 loop {
-                    events.clear();
-                    match epoll::wait(
-                        epoll,
-                        rustix::buffer::spare_capacity(&mut events),
-                        poll_timeout.as_ref(),
-                    ) {
-                        Ok(_) => break,
+                    match epoll::wait(epoll, &mut events, poll_timeout) {
+                        Ok(()) => break,
                         Err(rustix::io::Errno::INTR) => vm.check_signals()?,
                         Err(e) => return Err(e.into_pyexception(vm)),
                     }
                     if let Some(deadline) = deadline {
                         if let Some(new_timeout) = deadline.checked_duration_since(Instant::now()) {
-                            poll_timeout = Some(new_timeout.try_into().unwrap());
+                            poll_timeout = make_poll_timeout(new_timeout).unwrap();
                         } else {
                             break;
                         }
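The select.rs hunk swaps rustix's Timespec-based timeout for a millisecond i32, but the shape of the loop is unchanged: wait, and on EINTR run Python signal handlers and re-arm with whatever time remains on a fixed deadline. A self-contained sketch of that pattern, hedged: plain std with a closure standing in for epoll::wait, purely illustrative:

use std::time::{Duration, Instant};

// Returns Ok(true) if the event arrived, Ok(false) on timeout.
// `wait` stands in for epoll::wait; an Interrupted error plays EINTR.
fn wait_with_deadline(
    mut wait: impl FnMut(Option<Duration>) -> std::io::Result<bool>,
    timeout: Option<Duration>,
) -> std::io::Result<bool> {
    let deadline = timeout.map(|d| Instant::now() + d);
    let mut remaining = timeout;
    loop {
        match wait(remaining) {
            Err(e) if e.kind() == std::io::ErrorKind::Interrupted => {
                // A signal landed mid-wait: recompute the budget from the
                // fixed deadline instead of restarting the full timeout.
                if let Some(deadline) = deadline {
                    match deadline.checked_duration_since(Instant::now()) {
                        Some(left) => remaining = Some(left),
                        None => return Ok(false), // deadline already passed
                    }
                }
            }
            other => return other,
        }
    }
}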
diff --git a/stdlib/src/zlib.rs b/stdlib/src/zlib.rs
index 0578f20c86..9c19b74066 100644
--- a/stdlib/src/zlib.rs
+++ b/stdlib/src/zlib.rs
@@ -1,17 +1,14 @@
 // spell-checker:ignore compressobj decompressobj zdict chunksize zlibmodule miniz chunker
 
-pub(crate) use zlib::{DecompressArgs, make_module};
+pub(crate) use zlib::make_module;
 
 #[pymodule]
 mod zlib {
-    use super::generic::{
-        DecompressError, DecompressState, DecompressStatus, Decompressor, FlushKind, flush_sync,
-    };
     use crate::vm::{
         PyObject, PyPayload, PyResult, VirtualMachine,
         builtins::{PyBaseExceptionRef, PyBytesRef, PyIntRef, PyTypeRef},
         common::lock::PyMutex,
-        convert::{ToPyException, TryFromBorrowedObject},
+        convert::TryFromBorrowedObject,
         function::{ArgBytesLike, ArgPrimitiveIndex, ArgSize, OptionalArg},
         types::Constructor,
     };
@@ -145,18 +142,18 @@ mod zlib {
     }
 
     #[derive(Clone)]
-    pub(crate) struct Chunker<'a> {
+    struct Chunker<'a> {
         data1: &'a [u8],
         data2: &'a [u8],
     }
     impl<'a> Chunker<'a> {
-        pub(crate) fn new(data: &'a [u8]) -> Self {
+        fn new(data: &'a [u8]) -> Self {
             Self {
                 data1: data,
                 data2: &[],
             }
         }
-        pub(crate) fn chain(data1: &'a [u8], data2: &'a [u8]) -> Self {
+        fn chain(data1: &'a [u8], data2: &'a [u8]) -> Self {
             if data1.is_empty() {
                 Self {
                     data1: data2,
@@ -166,19 +163,19 @@ mod zlib {
                 Self { data1, data2 }
             }
         }
-        pub(crate) fn len(&self) -> usize {
+        fn len(&self) -> usize {
             self.data1.len() + self.data2.len()
         }
-        pub(crate) fn is_empty(&self) -> bool {
+        fn is_empty(&self) -> bool {
             self.data1.is_empty()
         }
-        pub(crate) fn to_vec(&self) -> Vec<u8> {
+        fn to_vec(&self) -> Vec<u8> {
             [self.data1, self.data2].concat()
         }
-        pub(crate) fn chunk(&self) -> &'a [u8] {
+        fn chunk(&self) -> &'a [u8] {
             self.data1.get(..CHUNKSIZE).unwrap_or(self.data1)
         }
-        pub(crate) fn advance(&mut self, consumed: usize) {
+        fn advance(&mut self, consumed: usize) {
             self.data1 = &self.data1[consumed..];
             if self.data1.is_empty() {
                 self.data1 = std::mem::take(&mut self.data2);
@@ -186,24 +183,28 @@ mod zlib {
         }
     }
 
-    fn _decompress<D: Decompressor>(
+    fn _decompress(
         data: &[u8],
-        d: &mut D,
+        d: &mut Decompress,
         bufsize: usize,
         max_length: Option<usize>,
-        calc_flush: impl Fn(bool) -> D::Flush,
-    ) -> Result<(Vec<u8>, bool), D::Error> {
+        is_flush: bool,
+        zdict: Option<&ArgBytesLike>,
+        vm: &VirtualMachine,
+    ) -> PyResult<(Vec<u8>, bool)> {
         let mut data = Chunker::new(data);
-        _decompress_chunks(&mut data, d, bufsize, max_length, calc_flush)
+        _decompress_chunks(&mut data, d, bufsize, max_length, is_flush, zdict, vm)
     }
 
-    pub(super) fn _decompress_chunks<D: Decompressor>(
+    fn _decompress_chunks(
         data: &mut Chunker<'_>,
-        d: &mut D,
+        d: &mut Decompress,
         bufsize: usize,
         max_length: Option<usize>,
-        calc_flush: impl Fn(bool) -> D::Flush,
-    ) -> Result<(Vec<u8>, bool), D::Error> {
+        is_flush: bool,
+        zdict: Option<&ArgBytesLike>,
+        vm: &VirtualMachine,
+    ) -> PyResult<(Vec<u8>, bool)> {
         if data.is_empty() {
             return Ok((Vec::new(), true));
         }
@@ -212,7 +213,16 @@ mod zlib {
 
         'outer: loop {
             let chunk = data.chunk();
-            let flush = calc_flush(chunk.len() == data.len());
+            let flush = if is_flush {
+                // if this is the final chunk, finish it
+                if chunk.len() == data.len() {
+                    FlushDecompress::Finish
+                } else {
+                    FlushDecompress::None
+                }
+            } else {
+                FlushDecompress::Sync
+            };
             loop {
                 let additional = std::cmp::min(bufsize, max_length - buf.capacity());
                 if additional == 0 {
@@ -228,7 +238,7 @@ mod zlib {
 
             match res {
                 Ok(status) => {
-                    let stream_end = status.is_stream_end();
+                    let stream_end = status == Status::StreamEnd;
                     if stream_end || data.is_empty() {
                         // we've reached the end of the stream, we're done
                         buf.shrink_to_fit();
@@ -242,7 +252,11 @@ mod zlib {
                     }
                 }
                 Err(e) => {
-                    d.maybe_set_dict(e)?;
+                    let Some(zdict) = e.needs_dictionary().and(zdict) else {
+                        return Err(new_zlib_error(&e.to_string(), vm));
+                    };
+                    d.set_dictionary(&zdict.borrow_buf())
+                        .map_err(|_| new_zlib_error("failed to set dictionary", vm))?;
                     // now try the next chunk
                     continue 'outer;
                }
@@ -271,8 +285,8 @@ mod zlib {
         } = args;
         data.with_ref(|data| {
             let mut d = InitOptions::new(wbits.value, vm)?.decompress();
-            let (buf, stream_end) = _decompress(data, &mut d, bufsize.value, None, flush_sync)
-                .map_err(|e| new_zlib_error(e.to_string(), vm))?;
+            let (buf, stream_end) =
+                _decompress(data, &mut d, bufsize.value, None, false, None, vm)?;
             if !stream_end {
                 return Err(new_zlib_error(
                     "Error -5 while decompressing data: incomplete or truncated stream",
@@ -302,8 +316,9 @@ mod zlib {
             }
         }
         let inner = PyDecompressInner {
-            decompress: Some(DecompressWithDict { decompress, zdict }),
+            decompress: Some(decompress),
             eof: false,
+            zdict,
             unused_data: vm.ctx.empty_bytes.clone(),
             unconsumed_tail: vm.ctx.empty_bytes.clone(),
         };
@@ -314,7 +329,8 @@ mod zlib {
 
     #[derive(Debug)]
     struct PyDecompressInner {
-        decompress: Option<DecompressWithDict>,
+        decompress: Option<Decompress>,
+        zdict: Option<ArgBytesLike>,
         eof: bool,
         unused_data: PyBytesRef,
         unconsumed_tail: PyBytesRef,
@@ -354,25 +370,14 @@ mod zlib {
             return Err(new_zlib_error(USE_AFTER_FINISH_ERR, vm));
         };
 
+        let zdict = if is_flush { None } else { inner.zdict.as_ref() };
+
         let prev_in = d.total_in();
-        let res = if is_flush {
-            // if is_flush: ignore zdict, finish if final chunk
-            let calc_flush = |final_chunk| {
-                if final_chunk {
-                    FlushDecompress::Finish
-                } else {
-                    FlushDecompress::None
-                }
+        let (ret, stream_end) =
+            match _decompress(data, d, bufsize, max_length, is_flush, zdict, vm) {
+                Ok((buf, stream_end)) => (Ok(buf), stream_end),
+                Err(err) => (Err(err), false),
             };
-            _decompress(data, &mut d.decompress, bufsize, max_length, calc_flush)
-        } else {
-            _decompress(data, d, bufsize, max_length, flush_sync)
-        }
-        .map_err(|e| new_zlib_error(e.to_string(), vm));
-        let (ret, stream_end) = match res {
-            Ok((buf, stream_end)) => (Ok(buf), stream_end),
-            Err(err) => (Err(err), false),
-        };
         let consumed = (d.total_in() - prev_in) as usize;
 
         // save unused input
@@ -399,7 +404,7 @@ mod zlib {
             .try_into()
             .map_err(|_| vm.new_value_error("must be non-negative".to_owned()))?;
         let max_length = (max_length != 0).then_some(max_length);
-        let data = &*args.data();
+        let data = &*args.data.borrow_buf();
 
         let inner = &mut *self.inner.lock();
 
@@ -435,24 +440,13 @@ mod zlib {
     }
 
     #[derive(FromArgs)]
-    pub(crate) struct DecompressArgs {
+    struct DecompressArgs {
         #[pyarg(positional)]
         data: ArgBytesLike,
         #[pyarg(any, optional)]
         max_length: OptionalArg<ArgSize>,
     }
 
-    impl DecompressArgs {
-        pub(crate) fn data(&self) -> crate::common::borrow::BorrowedValue<'_, [u8]> {
-            self.data.borrow_buf()
-        }
-        pub(crate) fn max_length(&self) -> Option<usize> {
-            self.max_length
-                .into_option()
-                .and_then(|ArgSize { value }| usize::try_from(value).ok())
-        }
-    }
-
     #[derive(FromArgs)]
     #[allow(dead_code)] // FIXME: use args
     struct CompressobjArgs {
@@ -594,8 +588,8 @@ mod zlib {
         }
     }
 
-    fn new_zlib_error(message: impl Into<String>, vm: &VirtualMachine) -> PyBaseExceptionRef {
-        vm.new_exception_msg(vm.class("zlib", "error"), message.into())
+    fn new_zlib_error(message: &str, vm: &VirtualMachine) -> PyBaseExceptionRef {
+        vm.new_exception_msg(vm.class("zlib", "error"), message.to_owned())
     }
 
     const USE_AFTER_FINISH_ERR: &str = "Error -2: inconsistent stream state";
@@ -632,68 +626,19 @@ mod zlib {
     #[pyclass(name = "_ZlibDecompressor")]
     #[derive(Debug, PyPayload)]
     struct ZlibDecompressor {
-        inner: PyMutex<DecompressState<DecompressWithDict>>,
+        inner: PyMutex<ZlibDecompressorInner>,
     }
 
     #[derive(Debug)]
-    struct DecompressWithDict {
+    struct ZlibDecompressorInner {
         decompress: Decompress,
+        unused_data: PyBytesRef,
+        input_buffer: Vec<u8>,
         zdict: Option<ArgBytesLike>,
+        eof: bool,
+        needs_input: bool,
     }
 
-    impl DecompressStatus for Status {
-        fn is_stream_end(&self) -> bool {
-            *self == Status::StreamEnd
-        }
-    }
-
-    impl FlushKind for FlushDecompress {
-        const SYNC: Self = FlushDecompress::Sync;
-    }
-
-    impl Decompressor for Decompress {
-        type Flush = FlushDecompress;
-        type Status = Status;
-        type Error = flate2::DecompressError;
-
-        fn total_in(&self) -> u64 {
-            self.total_in()
-        }
-        fn decompress_vec(
-            &mut self,
-            input: &[u8],
-            output: &mut Vec<u8>,
-            flush: Self::Flush,
-        ) -> Result<Self::Status, Self::Error> {
-            self.decompress_vec(input, output, flush)
-        }
-    }
-
-    impl Decompressor for DecompressWithDict {
-        type Flush = FlushDecompress;
-        type Status = Status;
-        type Error = flate2::DecompressError;
-
-        fn total_in(&self) -> u64 {
-            self.decompress.total_in()
-        }
-        fn decompress_vec(
-            &mut self,
-            input: &[u8],
-            output: &mut Vec<u8>,
-            flush: Self::Flush,
-        ) -> Result<Self::Status, Self::Error> {
-            self.decompress.decompress_vec(input, output, flush)
-        }
-        fn maybe_set_dict(&mut self, err: Self::Error) -> Result<(), Self::Error> {
-            let zdict = err.needs_dictionary().and(self.zdict.as_ref()).ok_or(err)?;
-            self.decompress.set_dictionary(&zdict.borrow_buf())?;
-            Ok(())
-        }
-    }
-
-    // impl Deconstruct
-
     impl Constructor for ZlibDecompressor {
         type Args = DecompressobjArgs;
 
@@ -706,7 +651,14 @@ mod zlib {
                     .map_err(|_| new_zlib_error("failed to set dictionary", vm))?;
             }
         }
-        let inner = DecompressState::new(DecompressWithDict { decompress, zdict }, vm);
+        let inner = ZlibDecompressorInner {
+            decompress,
+            unused_data: vm.ctx.empty_bytes.clone(),
+            input_buffer: Vec::new(),
+            zdict,
+            eof: false,
+            needs_input: true,
+        };
         Self {
             inner: PyMutex::new(inner),
         }
@@ -719,151 +671,61 @@ mod zlib {
     impl ZlibDecompressor {
         #[pygetset]
         fn eof(&self) -> bool {
-            self.inner.lock().eof()
+            self.inner.lock().eof
         }
 
         #[pygetset]
         fn unused_data(&self) -> PyBytesRef {
-            self.inner.lock().unused_data()
+            self.inner.lock().unused_data.clone()
         }
 
         #[pygetset]
         fn needs_input(&self) -> bool {
-            self.inner.lock().needs_input()
+            self.inner.lock().needs_input
         }
 
         #[pymethod]
         fn decompress(&self, args: DecompressArgs, vm: &VirtualMachine) -> PyResult<Vec<u8>> {
-            let max_length = args.max_length();
-            let data = &*args.data();
+            let max_length = args
+                .max_length
+                .into_option()
+                .and_then(|ArgSize { value }| usize::try_from(value).ok());
+            let data = &*args.data.borrow_buf();
 
             let inner = &mut *self.inner.lock();
-            inner
-                .decompress(data, max_length, DEF_BUF_SIZE, vm)
-                .map_err(|e| match e {
-                    DecompressError::Decompress(err) => new_zlib_error(err.to_string(), vm),
-                    DecompressError::Eof(err) => err.to_pyexception(vm),
-                })
-        }
-
-        // TODO: Wait for getstate pyslot to be fixed
-        // #[pyslot]
-        // fn getstate(zelf: &PyObject, vm: &VirtualMachine) -> PyResult {
-        //     Err(vm.new_type_error("cannot serialize '_ZlibDecompressor' object".to_owned()))
-        // }
-    }
-}
-
-mod generic {
-    use super::zlib::{_decompress_chunks, Chunker};
-    use crate::vm::{
-        VirtualMachine,
-        builtins::{PyBaseExceptionRef, PyBytesRef},
-        convert::ToPyException,
-    };
-
-    pub(crate) trait Decompressor {
-        type Flush: FlushKind;
-        type Status: DecompressStatus;
-        type Error;
-
-        fn total_in(&self) -> u64;
-        fn decompress_vec(
-            &mut self,
-            input: &[u8],
-            output: &mut Vec<u8>,
-            flush: Self::Flush,
-        ) -> Result<Self::Status, Self::Error>;
-        fn maybe_set_dict(&mut self, err: Self::Error) -> Result<(), Self::Error> {
-            Err(err)
-        }
-    }
-
-    pub(crate) trait DecompressStatus {
-        fn is_stream_end(&self) -> bool;
-    }
-
-    pub(crate) trait FlushKind: Copy {
-        const SYNC: Self;
-    }
-
-    impl FlushKind for () {
-        const SYNC: Self = ();
-    }
-
-    pub(super) fn flush_sync<T: FlushKind>(_final_chunk: bool) -> T {
-        T::SYNC
-    }
-
-    #[derive(Debug)]
-    pub(crate) struct DecompressState<D> {
-        decompress: D,
-        unused_data: PyBytesRef,
-        input_buffer: Vec<u8>,
-        eof: bool,
-        needs_input: bool,
-    }
-
-    impl<D: Decompressor> DecompressState<D> {
-        pub(crate) fn new(decompress: D, vm: &VirtualMachine) -> Self {
-            Self {
-                decompress,
-                unused_data: vm.ctx.empty_bytes.clone(),
-                input_buffer: Vec::new(),
-                eof: false,
-                needs_input: true,
-            }
-        }
-
-        pub(crate) fn eof(&self) -> bool {
-            self.eof
-        }
-
-        pub(crate) fn unused_data(&self) -> PyBytesRef {
-            self.unused_data.clone()
-        }
-
-        pub(crate) fn needs_input(&self) -> bool {
-            self.needs_input
-        }
-
-        pub(crate) fn decompress(
-            &mut self,
-            data: &[u8],
-            max_length: Option<usize>,
-            bufsize: usize,
-            vm: &VirtualMachine,
-        ) -> Result<Vec<u8>, DecompressError<D::Error>> {
-            if self.eof {
-                return Err(DecompressError::Eof(EofError));
+            if inner.eof {
+                return Err(vm.new_eof_error("End of stream already reached".to_owned()));
             }
 
-            let input_buffer = &mut self.input_buffer;
-            let d = &mut self.decompress;
+            let input_buffer = &mut inner.input_buffer;
+            let d = &mut inner.decompress;
 
             let mut chunks = Chunker::chain(input_buffer, data);
 
+            let zdict = inner.zdict.as_ref();
+            let bufsize = DEF_BUF_SIZE;
+
             let prev_len = chunks.len();
             let (ret, stream_end) =
-                match _decompress_chunks(&mut chunks, d, bufsize, max_length, flush_sync) {
+                match _decompress_chunks(&mut chunks, d, bufsize, max_length, false, zdict, vm) {
                     Ok((buf, stream_end)) => (Ok(buf), stream_end),
                     Err(err) => (Err(err), false),
                 };
             let consumed = prev_len - chunks.len();
 
-            self.eof |= stream_end;
+            inner.eof |= stream_end;
 
-            if self.eof {
-                self.needs_input = false;
+            if inner.eof {
+                inner.needs_input = false;
                 if !chunks.is_empty() {
-                    self.unused_data = vm.ctx.new_bytes(chunks.to_vec());
+                    inner.unused_data = vm.ctx.new_bytes(chunks.to_vec());
                 }
             } else if chunks.is_empty() {
                 input_buffer.clear();
-                self.needs_input = true;
+                inner.needs_input = true;
             } else {
-                self.needs_input = false;
+                inner.needs_input = false;
                 if let Some(n_consumed_from_data) = consumed.checked_sub(input_buffer.len()) {
                     input_buffer.clear();
                     input_buffer.extend_from_slice(&data[n_consumed_from_data..]);
@@ -873,28 +735,13 @@ mod generic {
                 }
             }
 
-            ret.map_err(DecompressError::Decompress)
-        }
-    }
-
-    pub(crate) enum DecompressError<E> {
-        Decompress(E),
-        Eof(EofError),
-    }
-
-    impl<E> From<E> for DecompressError<E> {
-        fn from(err: E) -> Self {
-            Self::Decompress(err)
+            ret
         }
-    }
-
-    pub(crate) struct EofError;
 
-    impl ToPyException for EofError {
-        fn to_pyexception(&self, vm: &VirtualMachine) -> PyBaseExceptionRef {
-            vm.new_eof_error("End of stream already reached".to_owned())
-        }
+        // TODO: Wait for getstate pyslot to be fixed
+        // #[pyslot]
+        // fn getstate(zelf: &PyObject, vm: &VirtualMachine) -> PyResult {
+        //     Err(vm.new_type_error("cannot serialize '_ZlibDecompressor' object".to_owned()))
+        // }
     }
 }
-
-pub(crate) use generic::{DecompressError, DecompressState, DecompressStatus, Decompressor};
diff --git a/vm/Cargo.toml b/vm/Cargo.toml
index 5a4b0df2a1..125c263da9 100644
--- a/vm/Cargo.toml
+++ b/vm/Cargo.toml
@@ -10,8 +10,7 @@ repository.workspace = true
 license.workspace = true
 
 [features]
-default = ["compiler", "wasmbind", "stdio"]
-stdio = []
+default = ["compiler", "wasmbind"]
 importlib = []
 encodings = ["importlib"]
 vm-tracing-logging = []
@@ -52,7 +51,6 @@ bstr = { workspace = true }
 cfg-if = { workspace = true }
 crossbeam-utils = { workspace = true }
 chrono = { workspace = true, features = ["wasmbind"] }
-constant_time_eq = { workspace = true }
 flame = { workspace = true, optional = true }
 getrandom = { workspace = true }
 hex = { workspace = true }
@@ -103,7 +101,7 @@ uname = "0.1.1"
 
 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
 rustyline = { workspace = true }
-which = "7"
+which = "6"
 errno = "0.3"
 widestring = { workspace = true }
 
diff --git a/vm/src/frame.rs b/vm/src/frame.rs
index dbe5cb077a..6ffb454f54 100644
--- a/vm/src/frame.rs
+++ b/vm/src/frame.rs
@@ -851,6 +851,14 @@ impl ExecutingFrame<'_> {
             bytecode::Instruction::UnaryOperation { op } => self.execute_unary_op(vm, op.get(arg)),
             bytecode::Instruction::TestOperation { op } => self.execute_test(vm, op.get(arg)),
             bytecode::Instruction::CompareOperation { op } => self.execute_compare(vm, op.get(arg)),
+            bytecode::Instruction::IsOperation(neg) => {
+                let a = self.pop_value();
+                let b = self.pop_value();
+                // xor with neg to invert the result if needed
+                let result = vm.ctx.new_bool(a.is(b.as_ref()) ^ neg.get(arg));
+                self.push_value(result.into());
+                Ok(None)
+            }
             bytecode::Instruction::ReturnValue => {
                 let value = self.pop_value();
                 self.unwind_blocks(vm, UnwindReason::Returning { value })
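The frame.rs arm above is the whole runtime story for the new opcode: pop two values, compare identities, and XOR with the `neg` flag so one instruction serves both `is` and `is not`. A toy model of that dispatch, hedged: pointer equality stands in for the VM's object identity, and none of these types are RustPython's actual ones:

fn execute_is(a: &i32, b: &i32, neg: bool) -> bool {
    // `a is b` is object identity, modeled here as pointer equality;
    // neg == true corresponds to IsOperation(true), the compiled `is not`.
    std::ptr::eq(a, b) ^ neg
}

fn main() {
    let x = 1;
    let y = 1;
    assert!(execute_is(&x, &x, false)); // x is x
    assert!(execute_is(&x, &y, true)); // x is not y: equal but distinct
}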
diff --git a/vm/src/function/number.rs b/vm/src/function/number.rs
index bead82123e..0e36f57ad1 100644
--- a/vm/src/function/number.rs
+++ b/vm/src/function/number.rs
@@ -158,7 +158,7 @@ impl TryFromObject for ArgIndex {
     }
 }
 
-#[derive(Debug, Copy, Clone)]
+#[derive(Debug)]
 #[repr(transparent)]
 pub struct ArgPrimitiveIndex<T> {
     pub value: T,
diff --git a/vm/src/lib.rs b/vm/src/lib.rs
index e854518dc2..de0042c619 100644
--- a/vm/src/lib.rs
+++ b/vm/src/lib.rs
@@ -14,6 +14,7 @@
 #![allow(clippy::upper_case_acronyms)]
 #![doc(html_logo_url = "https://raw.githubusercontent.com/RustPython/RustPython/main/logo.png")]
 #![doc(html_root_url = "https://docs.rs/rustpython-vm/")]
+#![cfg_attr(target_os = "redox", feature(raw_ref_op))]
 
 #[cfg(feature = "flame-it")]
 #[macro_use]
diff --git a/vm/src/stdlib/nt.rs b/vm/src/stdlib/nt.rs
index cdab9e2f71..083824bcd0 100644
--- a/vm/src/stdlib/nt.rs
+++ b/vm/src/stdlib/nt.rs
@@ -388,27 +388,6 @@ pub(crate) mod module {
         }
     }
 
-    #[pyfunction]
-    fn getlogin(vm: &VirtualMachine) -> PyResult<String> {
-        let mut buffer = [0u16; 257];
-        let mut size = buffer.len() as u32;
-
-        let success = unsafe {
-            windows_sys::Win32::System::WindowsProgramming::GetUserNameW(
-                buffer.as_mut_ptr(),
-                &mut size,
-            )
-        };
-
-        if success != 0 {
-            // Convert the buffer (which is UTF-16) to a Rust String
-            let username = std::ffi::OsString::from_wide(&buffer[..(size - 1) as usize]);
-            Ok(username.to_str().unwrap().to_string())
-        } else {
-            Err(vm.new_os_error(format!("Error code: {success}")))
-        }
-    }
-
     pub fn raw_set_handle_inheritable(handle: intptr_t, inheritable: bool) -> std::io::Result<()> {
         let flags = if inheritable {
             Foundation::HANDLE_FLAG_INHERIT
diff --git a/vm/src/stdlib/operator.rs b/vm/src/stdlib/operator.rs
index 2404b0c337..fbb8147e9f 100644
--- a/vm/src/stdlib/operator.rs
+++ b/vm/src/stdlib/operator.rs
@@ -2,6 +2,7 @@ pub(crate) use _operator::make_module;
 
 #[pymodule]
 mod _operator {
+    use crate::common::cmp;
     use crate::{
         AsObject, Py, PyObjectRef, PyPayload, PyRef, PyResult, VirtualMachine,
         builtins::{PyInt, PyIntRef, PyStr, PyStrRef, PyTupleRef, PyTypeRef},
@@ -12,7 +13,6 @@ mod _operator {
         recursion::ReprGuard,
         types::{Callable, Constructor, PyComparisonOp, Representable},
     };
-    use constant_time_eq::constant_time_eq;
 
     #[pyfunction]
     fn lt(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
@@ -328,9 +328,11 @@ mod _operator {
                         "comparing strings with non-ASCII characters is not supported".to_owned(),
                     ));
                 }
-                constant_time_eq(a.as_bytes(), b.as_bytes())
+                cmp::timing_safe_cmp(a.as_bytes(), b.as_bytes())
+            }
+            (Either::B(a), Either::B(b)) => {
+                a.with_ref(|a| b.with_ref(|b| cmp::timing_safe_cmp(a, b)))
             }
-            (Either::B(a), Either::B(b)) => a.with_ref(|a| b.with_ref(|b| constant_time_eq(a, b))),
             _ => {
                 return Err(vm.new_type_error(
                     "unsupported operand types(s) or combination of types".to_owned(),
diff --git a/vm/src/vm/mod.rs b/vm/src/vm/mod.rs
index 08fbff94f9..7baaae7770 100644
--- a/vm/src/vm/mod.rs
+++ b/vm/src/vm/mod.rs
@@ -14,8 +14,6 @@ mod vm_new;
 mod vm_object;
 mod vm_ops;
 
-#[cfg(not(feature = "stdio"))]
-use crate::builtins::PyNone;
 use crate::{
     AsObject, Py, PyObject, PyObjectRef, PyPayload, PyRef, PyResult,
     builtins::{
@@ -303,8 +301,7 @@ impl VirtualMachine {
         #[cfg(any(not(target_arch = "wasm32"), target_os = "wasi"))]
         {
             let io = import::import_builtin(self, "_io")?;
-            #[cfg(feature = "stdio")]
-            let make_stdio = |name, fd, write| {
+            let set_stdio = |name, fd, write| {
                 let buffered_stdio = self.state.settings.buffered_stdio;
                 let unbuffered = write && !buffered_stdio;
                 let buf = crate::stdlib::io::open(
@@ -335,13 +332,7 @@ impl VirtualMachine {
                )?;
                let mode = if write { "w" } else { "r" };
                stdio.set_attr("mode", self.ctx.new_str(mode), self)?;
-                Ok(stdio)
-            };
-            #[cfg(not(feature = "stdio"))]
-            let make_stdio = |_name, _fd, _write| Ok(PyNone.into_pyobject(self));
 
-            let set_stdio = |name, fd, write| {
-                let stdio = make_stdio(name, fd, write)?;
                 let dunder_name = self.ctx.intern_str(format!("__{name}__"));
                 self.sys_module.set_attr(
                     dunder_name, // e.g. __stdin__
diff --git a/wasm/demo/package-lock.json b/wasm/demo/package-lock.json
index 7bbb974322..01753cf48f 100644
--- a/wasm/demo/package-lock.json
+++ b/wasm/demo/package-lock.json
@@ -2733,9 +2733,9 @@
       }
     },
     "node_modules/http-proxy-middleware": {
-      "version": "2.0.9",
-      "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz",
-      "integrity": "sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==",
+      "version": "2.0.7",
+      "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.7.tgz",
+      "integrity": "sha512-fgVY8AV7qU7z/MmXJ/rxwbrtQH4jBQ9m7kp3llF0liB7glmFeVZFBepQb32T3y8n8k2+AEYuMPCpinYW+/CuRA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {

From 71d8e94325900a0395e82290b1b4877e7d8a3102 Mon Sep 17 00:00:00 2001
From: Jeong YunWon
Date: Fri, 25 Apr 2025 15:53:25 +0900
Subject: [PATCH 02/11] DO NOT MERGE! reduce ci not to waste them

---
 .github/workflows/ci.yaml | 260 +-------------------------------------
 1 file changed, 1 insertion(+), 259 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index afd3201e28..01e7828d11 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -112,130 +112,6 @@ env:
   PYTHON_VERSION: "3.13.1"
 
 jobs:
-  rust_tests:
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip:ci') }}
-    env:
-      RUST_BACKTRACE: full
-    name: Run rust tests
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        os: [macos-latest, ubuntu-latest, windows-latest]
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          components: clippy
-      - uses: Swatinem/rust-cache@v2
-
-      - name: Set up the Windows environment
-        shell: bash
-        run: |
-          git config --system core.longpaths true
-          cargo install --target-dir=target -v cargo-vcpkg
-          cargo vcpkg -v build
-        if: runner.os == 'Windows'
-      - name: Set up the Mac environment
-        run: brew install autoconf automake libtool
-        if: runner.os == 'macOS'
-
-      - name: run clippy
-        run: cargo clippy ${{ env.CARGO_ARGS }} --workspace --all-targets --exclude rustpython_wasm -- -Dwarnings
-
-      - name: run rust tests
-        run: cargo test --workspace --exclude rustpython_wasm --verbose --features threading ${{ env.CARGO_ARGS }}
-        if: runner.os != 'macOS'
-      - name: run rust tests
-        run: cargo test --workspace --exclude rustpython_wasm --exclude rustpython-jit --verbose --features threading ${{ env.CARGO_ARGS }}
-        if: runner.os == 'macOS'
-
-      - name: check compilation without threading
-        run: cargo check ${{ env.CARGO_ARGS }}
-
-      - name: Test example projects
-        run:
-          cargo run --manifest-path example_projects/barebone/Cargo.toml
-          cargo run --manifest-path example_projects/frozen_stdlib/Cargo.toml
-        if: runner.os == 'Linux'
-
-      - name: prepare AppleSilicon build
-        uses: dtolnay/rust-toolchain@stable
-        with:
-          target: aarch64-apple-darwin
-        if: runner.os == 'macOS'
-      - name: Check compilation for Apple Silicon
-        run: cargo check --target aarch64-apple-darwin
-        if: runner.os == 'macOS'
-      - name: prepare iOS build
-        uses: dtolnay/rust-toolchain@stable
-        with:
-          target: aarch64-apple-ios
-        if: runner.os == 'macOS'
-      - name: Check compilation for iOS
-        run: cargo check --target aarch64-apple-ios
-        if: runner.os == 'macOS'
-
-  exotic_targets:
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip:ci') }}
-    name: Ensure compilation on various targets
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          target: i686-unknown-linux-gnu
-
-      - name: Install gcc-multilib and musl-tools
-        run: sudo apt-get update && sudo apt-get install gcc-multilib musl-tools
-      - name: Check compilation for x86 32bit
-        run: cargo check --target i686-unknown-linux-gnu
-
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          target: aarch64-linux-android
-
-      - name: Check compilation for android
-        run: cargo check --target aarch64-linux-android
-
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          target: aarch64-unknown-linux-gnu
-
-      - name: Install gcc-aarch64-linux-gnu
-        run: sudo apt install gcc-aarch64-linux-gnu
-      - name: Check compilation for aarch64 linux gnu
-        run: cargo check --target aarch64-unknown-linux-gnu
-
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          target: i686-unknown-linux-musl
-
-      - name: Check compilation for musl
-        run: cargo check --target i686-unknown-linux-musl
-
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          target: x86_64-unknown-freebsd
-
-      - name: Check compilation for freebsd
-        run: cargo check --target x86_64-unknown-freebsd
-
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          target: x86_64-unknown-freebsd
-
-      - name: Check compilation for freeBSD
-        run: cargo check --target x86_64-unknown-freebsd
-
-      - name: Prepare repository for redox compilation
-        run: bash scripts/redox/uncomment-cargo.sh
-      - name: Check compilation for Redox
-        uses: coolreader18/redoxer-action@v1
-        with:
-          command: check
-          args: --ignore-rust-version
-
   snippets_cpython:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip:ci') }}
     env:
@@ -244,7 +120,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [macos-latest, ubuntu-latest, windows-latest]
+        os: [ubuntu-latest]
       fail-fast: false
     steps:
       - uses: actions/checkout@v4
@@ -307,137 +183,3 @@ jobs:
           testvenv/bin/rustpython -m pip install wheel
       - name: Check whats_left is not broken
         run: python -I whats_left.py
-
-  lint:
-    name: Check Rust code with rustfmt and clippy
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          components: rustfmt, clippy
-      - name: run rustfmt
-        run: cargo fmt --check
-      - name: run clippy on wasm
-        run: cargo clippy --manifest-path=wasm/lib/Cargo.toml -- -Dwarnings
-      - uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.PYTHON_VERSION }}
-      - name: install ruff
-        run: python -m pip install ruff==0.0.291 # astral-sh/ruff#7778
-      - name: Ensure docs generate no warnings
-        run: cargo doc
-      - name: run python lint
-        run: ruff extra_tests wasm examples --exclude='./.*',./Lib,./vm/Lib,./benches/ --select=E9,F63,F7,F82 --show-source
-      - name: install prettier
-        run: yarn global add prettier && echo "$(yarn global bin)" >>$GITHUB_PATH
-      - name: check wasm code with prettier
-        # prettier doesn't handle ignore files very well: https://github.com/prettier/prettier/issues/8506
-        run: cd wasm && git ls-files -z | xargs -0 prettier --check -u
-      # Keep cspell check as the last step. This is optional test.
-      - name: install extra dictionaries
-        run: npm install @cspell/dict-en_us @cspell/dict-cpp @cspell/dict-python @cspell/dict-rust @cspell/dict-win32 @cspell/dict-shell
-      - name: spell checker
-        uses: streetsidesoftware/cspell-action@v6
-        with:
-          files: '**/*.rs'
-          incremental_files_only: true
-
-  miri:
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip:ci') }}
-    name: Run tests under miri
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@master
-        with:
-          toolchain: nightly
-          components: miri
-
-      - uses: Swatinem/rust-cache@v2
-      - name: Run tests under miri
-        # miri-ignore-leaks because the type-object circular reference means that there will always be
-        # a memory leak, at least until we have proper cyclic gc
-        run: MIRIFLAGS='-Zmiri-ignore-leaks' cargo +nightly miri test -p rustpython-vm -- miri_test
-
-  wasm:
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip:ci') }}
-    name: Check the WASM package and demo
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-
-      - uses: Swatinem/rust-cache@v2
-      - name: install wasm-pack
-        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
-      - name: install geckodriver
-        run: |
-          wget https://github.com/mozilla/geckodriver/releases/download/v0.36.0/geckodriver-v0.36.0-linux64.tar.gz
-          mkdir geckodriver
-          tar -xzf geckodriver-v0.36.0-linux64.tar.gz -C geckodriver
-      - uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.PYTHON_VERSION }}
-      - run: python -m pip install -r requirements.txt
-        working-directory: ./wasm/tests
-      - uses: actions/setup-node@v4
-        with:
-          cache: "npm"
-          cache-dependency-path: "wasm/demo/package-lock.json"
-      - name: run test
-        run: |
-          export PATH=$PATH:`pwd`/../../geckodriver
-          npm install
-          npm run test
-        env:
-          NODE_OPTIONS: "--openssl-legacy-provider"
-        working-directory: ./wasm/demo
-      - uses: mwilliamson/setup-wabt-action@v3
-        with: { wabt-version: "1.0.36" }
-      - name: check wasm32-unknown without js
-        run: |
-          cd wasm/wasm-unknown-test
-          cargo build --release --verbose
-          if wasm-objdump -xj Import target/wasm32-unknown-unknown/release/wasm_unknown_test.wasm; then
-            echo "ERROR: wasm32-unknown module expects imports from the host environment" >2
-          fi
-      - name: build notebook demo
-        if: github.ref == 'refs/heads/release'
-        run: |
-          npm install
-          npm run dist
-          mv dist ../demo/dist/notebook
-        env:
-          NODE_OPTIONS: "--openssl-legacy-provider"
-        working-directory: ./wasm/notebook
-      - name: Deploy demo to Github Pages
-        if: success() && github.ref == 'refs/heads/release'
-        uses: peaceiris/actions-gh-pages@v4
-        env:
-          ACTIONS_DEPLOY_KEY: ${{ secrets.ACTIONS_DEMO_DEPLOY_KEY }}
-          PUBLISH_DIR: ./wasm/demo/dist
-          EXTERNAL_REPOSITORY: RustPython/demo
-          PUBLISH_BRANCH: master
-
-  wasm-wasi:
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip:ci') }}
-    name: Run snippets and cpython tests on wasm-wasi
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-        with:
-          target: wasm32-wasip1
-
-      - uses: Swatinem/rust-cache@v2
-      - name: Setup Wasmer
-        uses: wasmerio/setup-wasmer@v3
-      - name: Install clang
-        run: sudo apt-get update && sudo apt-get install clang -y
-      - name: build rustpython
-        run: cargo build --release --target wasm32-wasip1 --features freeze-stdlib,stdlib --verbose
-      - name: run snippets
-        run: wasmer run --dir `pwd` target/wasm32-wasip1/release/rustpython.wasm -- `pwd`/extra_tests/snippets/stdlib_random.py
-      - name: run cpython unittest
-        run: wasmer run --dir `pwd` target/wasm32-wasip1/release/rustpython.wasm -- `pwd`/Lib/test/test_int.py

From 65ab3b5eddce171455711e40604055a108abc171 Mon Sep 17 00:00:00 2001
From: Jeong YunWon
Date: Sun, 27 Apr 2025 12:48:43 +0900
Subject: [PATCH 03/11] check if other platforms also hang

---
 .github/workflows/ci.yaml | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 01e7828d11..63a9f5cb44 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -120,7 +120,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-latest]
+        os: [macos-latest, ubuntu-latest, windows-latest]
       fail-fast: false
     steps:
       - uses: actions/checkout@v4
@@ -151,8 +151,7 @@ jobs:
       - name: run snippets
         run: python -m pip install -r requirements.txt && pytest -v
         working-directory: ./extra_tests
-      - if: runner.os == 'Linux'
-        name: run cpython platform-independent tests
+      - name: run cpython platform-independent tests
         run:
           target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v ${{ env.PLATFORM_INDEPENDENT_TESTS }}

From d9c5c89d08630f7bb08be0c68255459ed425c692 Mon Sep 17 00:00:00 2001
From: Jeong YunWon
Date: Tue, 29 Apr 2025 18:54:14 +0900
Subject: [PATCH 04/11] remove platform-independent tests

---
 .github/workflows/ci.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 63a9f5cb44..73e9b92256 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -151,9 +151,9 @@ jobs:
       - name: run snippets
         run: python -m pip install -r requirements.txt && pytest -v
         working-directory: ./extra_tests
-      - name: run cpython platform-independent tests
-        run:
-          target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v ${{ env.PLATFORM_INDEPENDENT_TESTS }}
+      # - name: run cpython platform-independent tests
+      #   run:
+      #     target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v ${{ env.PLATFORM_INDEPENDENT_TESTS }}
       - if: runner.os == 'Linux'
         name: run cpython platform-dependent tests (Linux)
         run: target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v -x ${{ env.PLATFORM_INDEPENDENT_TESTS }}

From 212944a08b01626bc38fe955210cf85829a1f3f9 Mon Sep 17 00:00:00 2001
From: Jeong YunWon
Date: Tue, 29 Apr 2025 19:04:21 +0900
Subject: [PATCH 05/11] bisect try #1

---
 .github/workflows/ci.yaml | 41 +++------------------------------------
 1 file changed, 3 insertions(+), 38 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 73e9b92256..994b2c2812 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -73,41 +73,6 @@ env:
     test_grammar
     test_range
     test_index
-    test_int
-    test_int_literal
-    test_isinstance
-    test_iter
-    test_iterlen
-    test_itertools
-    test_json
-    test_keyword
-    test_keywordonlyarg
-    test_list
-    test_long
-    test_longexp
-    test_math
-    test_operator
-    test_ordered_dict
-    test_pow
-    test_raise
-    test_richcmp
-    test_scope
-    test_set
-    test_slice
-    test_sort
-    test_string
-    test_string_literals
-    test_strtod
-    test_structseq
-    test_subclassinit
-    test_super
-    test_syntax
-    test_tuple
-    test_types
-    test_unary
-    test_unpack
-    test_weakref
-    test_yield_from
 
   # Python version targeted by the CI.
PYTHON_VERSION: "3.13.1" @@ -151,9 +116,9 @@ jobs: - name: run snippets run: python -m pip install -r requirements.txt && pytest -v working-directory: ./extra_tests - # - name: run cpython platform-independent tests - # run: - # target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v ${{ env.PLATFORM_INDEPENDENT_TESTS }} + - name: run cpython platform-independent tests + run: + target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v ${{ env.PLATFORM_INDEPENDENT_TESTS }} - if: runner.os == 'Linux' name: run cpython platform-dependent tests (Linux) run: target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v -x ${{ env.PLATFORM_INDEPENDENT_TESTS }} From 2a8e0f1e0a0eade1a8b0a952808b72a33e7a4705 Mon Sep 17 00:00:00 2001 From: Jeong YunWon Date: Tue, 29 Apr 2025 19:14:55 +0900 Subject: [PATCH 06/11] try #2 --- .github/workflows/ci.yaml | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 994b2c2812..b5c28a2633 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -73,6 +73,23 @@ env: test_grammar test_range test_index + test_int + test_int_literal + test_isinstance + test_iter + test_iterlen + test_itertools + test_json + test_keyword + test_keywordonlyarg + test_list + test_long + test_longexp + test_math + test_operator + test_ordered_dict + test_pow + test_raise # Python version targeted by the CI. PYTHON_VERSION: "3.13.1" @@ -85,7 +102,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [macos-latest, ubuntu-latest, windows-latest] + os: [ubuntu-latest] fail-fast: false steps: - uses: actions/checkout@v4 From 8aa9d7d26e6ae9ed61415e465e4a98fba97e088e Mon Sep 17 00:00:00 2001 From: Jeong YunWon Date: Tue, 29 Apr 2025 19:22:28 +0900 Subject: [PATCH 07/11] try 3 --- .github/workflows/ci.yaml | 29 +---------------------------- 1 file changed, 1 insertion(+), 28 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b5c28a2633..7554089f49 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -136,31 +136,4 @@ jobs: - name: run cpython platform-independent tests run: target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v ${{ env.PLATFORM_INDEPENDENT_TESTS }} - - if: runner.os == 'Linux' - name: run cpython platform-dependent tests (Linux) - run: target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v -x ${{ env.PLATFORM_INDEPENDENT_TESTS }} - - if: runner.os == 'macOS' - name: run cpython platform-dependent tests (MacOS) - run: target/release/rustpython -m test -j 1 --slowest --fail-env-changed -v -x ${{ env.PLATFORM_INDEPENDENT_TESTS }} ${{ env.MACOS_SKIPS }} - - if: runner.os == 'Windows' - name: run cpython platform-dependent tests (windows partial - fixme) - run: - target/release/rustpython -m test -j 1 --slowest --fail-env-changed -v -x ${{ env.PLATFORM_INDEPENDENT_TESTS }} ${{ env.WINDOWS_SKIPS }} - - if: runner.os != 'Windows' - name: check that --install-pip succeeds - run: | - mkdir site-packages - target/release/rustpython --install-pip ensurepip --user - target/release/rustpython -m pip install six - - if: runner.os != 'Windows' - name: Check that ensurepip succeeds. 
-        run: |
-          target/release/rustpython -m ensurepip
-          target/release/rustpython -c "import pip"
-      - if: runner.os != 'Windows'
-        name: Check if pip inside venv is functional
-        run: |
-          target/release/rustpython -m venv testvenv
-          testvenv/bin/rustpython -m pip install wheel
-      - name: Check whats_left is not broken
-        run: python -I whats_left.py
+

From f2f15c75d5136513552e1e6930decac5ce41d40e Mon Sep 17 00:00:00 2001
From: Jeong YunWon
Date: Tue, 29 Apr 2025 19:22:36 +0900
Subject: [PATCH 08/11] try 3

---
 .github/workflows/ci.yaml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 7554089f49..c8c65a86f4 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -90,6 +90,14 @@ env:
     test_ordered_dict
     test_pow
     test_raise
+    test_richcmp
+    test_scope
+    test_set
+    test_slice
+    test_sort
+    test_string
+    test_string_literals
+    test_strtod
 
   # Python version targeted by the CI.
   PYTHON_VERSION: "3.13.1"

From 959ffedc8912b318baaf88bc2b822ecd03846fd1 Mon Sep 17 00:00:00 2001
From: Jeong YunWon
Date: Tue, 29 Apr 2025 19:29:51 +0900
Subject: [PATCH 09/11] try 4

---
 .github/workflows/ci.yaml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index c8c65a86f4..75241f2e65 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -98,6 +98,12 @@ env:
     test_string
     test_string_literals
     test_strtod
+    test_structseq
+    test_subclassinit
+    test_super
+    test_syntax
+    test_tuple
+
 
   # Python version targeted by the CI.
   PYTHON_VERSION: "3.13.1"

From 812a9c191cb9c35ec884220cecdbe80a32d2fb14 Mon Sep 17 00:00:00 2001
From: Jeong YunWon
Date: Tue, 29 Apr 2025 19:41:20 +0900
Subject: [PATCH 10/11] try 5

---
 .github/workflows/ci.yaml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 75241f2e65..0b768e2828 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -103,6 +103,9 @@ env:
     test_super
     test_syntax
     test_tuple
+    test_types
+    test_unary
+    test_unpack
 
   # Python version targeted by the CI.
   PYTHON_VERSION: "3.13.1"

From 2df207227d213a06276f1093e6b7cf2fc967e9b3 Mon Sep 17 00:00:00 2001
From: Jeong YunWon
Date: Tue, 29 Apr 2025 19:48:16 +0900
Subject: [PATCH 11/11] fully back

---
 .github/workflows/ci.yaml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 0b768e2828..e468cf3fd5 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -106,7 +106,8 @@ env:
     test_types
     test_unary
     test_unpack
-
+    test_weakref
+    test_yield_from
 
   # Python version targeted by the CI.
   PYTHON_VERSION: "3.13.1"
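
P.S. To replay the bisected step outside CI, a minimal sketch (assuming a
Linux checkout of RustPython; the two test names stand in for whatever
subset of the PLATFORM_INDEPENDENT_TESTS list is under suspicion and are
purely illustrative):

    # produce target/release/rustpython, as the workflow's build step does
    cargo build --release
    # same invocation as the "run cpython platform-independent tests" step,
    # with the ${{ env.PLATFORM_INDEPENDENT_TESTS }} expansion written out by hand
    target/release/rustpython -m test -j 1 -u all --slowest --fail-env-changed -v test_int test_tuple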