X Tutup
Skip to content
Open
48 changes: 48 additions & 0 deletions Lib/test/_test_gc_fast_cycles.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Run by test_gc.
from test import support
import _testinternalcapi
import gc
import unittest

class IncrementalGCTests(unittest.TestCase):
    """Stress test for the incremental GC.

    Rapid creation of reference cycles must not let the tracked heap grow
    without bound between GC increments.
    """

    # Use small increments to emulate longer running process in a shorter time
    @support.gc_threshold(200, 10)
    def test_incremental_gc_handles_fast_cycle_creation(self):

        class LinkedList:

            # Use slots to reduce number of implicit objects
            __slots__ = "next", "prev", "surprise"

            def __init__(self, next=None, prev=None):
                self.next = next
                if next is not None:
                    next.prev = self
                self.prev = prev
                if prev is not None:
                    prev.next = self

        def make_ll(depth):
            # Build a doubly-linked chain of `depth` nodes; the next/prev
            # links form reference cycles only the GC can reclaim.
            head = LinkedList()
            for i in range(depth):
                head = LinkedList(head, head.prev)
            return head

        head = make_ll(1000)

        # Use a unittest assertion rather than a bare `assert`, which would
        # be silently stripped when the suite runs under `python -O`.
        self.assertTrue(gc.isenabled())
        olds = []
        initial_heap_size = _testinternalcapi.get_tracked_heap_size()
        for i in range(20_000):
            newhead = make_ll(20)
            newhead.surprise = head
            olds.append(newhead)
            if len(olds) == 20:
                # Heap must stay bounded: incremental collection should keep
                # up with the garbage produced since the last check.
                new_objects = _testinternalcapi.get_tracked_heap_size() - initial_heap_size
                self.assertLess(new_objects, 27_000, f"Heap growing. Reached limit after {i} iterations")
                olds.clear()


if __name__ == "__main__":
    # Normally run as a subprocess helper by test_gc; this guard also
    # allows direct invocation for debugging.
    unittest.main()
115 changes: 98 additions & 17 deletions Lib/test/test_gc.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,8 @@ def test_function(self):
# is 3 because it includes f's code object.
self.assertIn(gc.collect(), (2, 3))

# TODO: RUSTPYTHON - weakref clear ordering differs from 3.15+
@unittest.expectedFailure
def test_function_tp_clear_leaves_consistent_state(self):
# https://github.com/python/cpython/issues/91636
code = """if 1:
Expand All @@ -262,9 +264,11 @@ class Cyclic(tuple):
# finalizer.
def __del__(self):

# 5. Create a weakref to `func` now. If we had created
# it earlier, it would have been cleared by the
# garbage collector before calling the finalizers.
# 5. Create a weakref to `func` now. In previous
# versions of Python, this would avoid having it
# cleared by the garbage collector before calling
# the finalizers. Now, weakrefs get cleared after
# calling finalizers.
self[1].ref = weakref.ref(self[0])

# 6. Drop the global reference to `latefin`. The only
Expand Down Expand Up @@ -293,16 +297,42 @@ def func():
# which will find `cyc` and `func` as garbage.
gc.collect()

# 9. Previously, this would crash because `func_qualname`
# had been NULL-ed out by func_clear().
# 9. Previously, this would crash because the weakref
# created in the finalizer revealed the function after
# `tp_clear` was called and `func_qualname`
# had been NULL-ed out by func_clear(). Now, we clear
# weakrefs to unreachable objects before calling `tp_clear`
# but after calling finalizers.
print(f"{func=}")
"""
# We're mostly just checking that this doesn't crash.
rc, stdout, stderr = assert_python_ok("-c", code)
self.assertEqual(rc, 0)
self.assertRegex(stdout, rb"""\A\s*func=<function at \S+>\s*\z""")
# The `func` global is None because the weakref was cleared.
self.assertRegex(stdout, rb"""\A\s*func=None""")
self.assertFalse(stderr)

# TODO: RUSTPYTHON - _datetime module not available
@unittest.expectedFailure
def test_datetime_weakref_cycle(self):
    # Regression test for https://github.com/python/cpython/issues/132413.
    # When the weakref used internally by the datetime extension sat in an
    # unreachable cycle, the GC could clear it before finalizers ran, so
    # get_module_state() returned NULL and datetime calls made from a
    # __del__ crashed. Fixed by clearing callback-less weakrefs only
    # *after* finalizers have run.
    code = """if 1:
        import _datetime
        class C:
            def __del__(self):
                print('__del__ called')
                _datetime.timedelta(days=1) # crash?

        l = [C()]
        l.append(l)
        """
    returncode, out, err = assert_python_ok("-c", code)
    self.assertEqual(returncode, 0)
    self.assertEqual(out.strip(), b'__del__ called')

@refcount_test
def test_frame(self):
def f():
Expand Down Expand Up @@ -652,9 +682,8 @@ def callback(ignored):
gc.collect()
self.assertEqual(len(ouch), 2) # else the callbacks didn't run
for x in ouch:
# If the callback resurrected one of these guys, the instance
# would be damaged, with an empty __dict__.
self.assertEqual(x, None)
# The weakref should be cleared before executing the callback.
self.assertIsNone(x)

def test_bug21435(self):
# This is a poor test - its only virtue is that it happened to
Expand Down Expand Up @@ -821,11 +850,15 @@ def test_get_stats(self):
self.assertEqual(len(stats), 3)
for st in stats:
self.assertIsInstance(st, dict)
self.assertEqual(set(st),
{"collected", "collections", "uncollectable"})
self.assertEqual(
set(st),
{"collected", "collections", "uncollectable", "candidates", "duration"}
)
self.assertGreaterEqual(st["collected"], 0)
self.assertGreaterEqual(st["collections"], 0)
self.assertGreaterEqual(st["uncollectable"], 0)
self.assertGreaterEqual(st["candidates"], 0)
self.assertGreaterEqual(st["duration"], 0)
# Check that collection counts are incremented correctly
if gc.isenabled():
self.addCleanup(gc.enable)
Expand All @@ -836,11 +869,25 @@ def test_get_stats(self):
self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
self.assertEqual(new[1]["collections"], old[1]["collections"])
self.assertEqual(new[2]["collections"], old[2]["collections"])
self.assertGreater(new[0]["duration"], old[0]["duration"])
self.assertEqual(new[1]["duration"], old[1]["duration"])
self.assertEqual(new[2]["duration"], old[2]["duration"])
for stat in ["collected", "uncollectable", "candidates"]:
self.assertGreaterEqual(new[0][stat], old[0][stat])
self.assertEqual(new[1][stat], old[1][stat])
self.assertEqual(new[2][stat], old[2][stat])
gc.collect(2)
new = gc.get_stats()
self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
old, new = new, gc.get_stats()
self.assertEqual(new[0]["collections"], old[0]["collections"])
self.assertEqual(new[1]["collections"], old[1]["collections"])
self.assertEqual(new[2]["collections"], old[2]["collections"] + 1)
self.assertEqual(new[0]["duration"], old[0]["duration"])
self.assertEqual(new[1]["duration"], old[1]["duration"])
self.assertGreater(new[2]["duration"], old[2]["duration"])
for stat in ["collected", "uncollectable", "candidates"]:
self.assertEqual(new[0][stat], old[0][stat])
self.assertEqual(new[1][stat], old[1][stat])
self.assertGreaterEqual(new[2][stat], old[2][stat])

def test_freeze(self):
gc.freeze()
Expand Down Expand Up @@ -1156,6 +1203,37 @@ def test_something(self):
""")
assert_python_ok("-c", source)

def test_do_not_cleanup_type_subclasses_before_finalization(self):
    # Regression test for https://github.com/python/cpython/issues/135552.
    # The weakrefs backing tp_subclasses must not be cleared before
    # finalizers (__del__) run: the line `fail = BaseNode.next.next`
    # accesses an attribute through a subclass instance, which requires
    # the subclass type (and its method cache) to still be consistent.
    code = textwrap.dedent("""
        class BaseNode:
            def __del__(self):
                BaseNode.next = BaseNode.next.next
                fail = BaseNode.next.next

        class Node(BaseNode):
            pass

        BaseNode.next = Node()
        BaseNode.next.next = Node()
        """)

    # Exercises the garbage collection that happens during interpreter
    # finalization.
    assert_python_ok("-c", code)

    # Exercises regular garbage collection by building the same cycle
    # inside a function scope.  Note the lowercase f-string prefix (PEP 8);
    # the executed source is identical to `code`, just indented into a
    # function body.
    code_inside_function = f"def test():\n{textwrap.indent(code, '    ')}\ntest()\n"
    assert_python_ok("-c", code_inside_function)


@unittest.skipUnless(Py_GIL_DISABLED, "requires free-threaded GC")
@unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi")
Expand Down Expand Up @@ -1260,9 +1338,11 @@ def test_collect(self):
# Check that we got the right info dict for all callbacks
for v in self.visit:
info = v[2]
self.assertTrue("generation" in info)
self.assertTrue("collected" in info)
self.assertTrue("uncollectable" in info)
self.assertIn("generation", info)
self.assertIn("collected", info)
self.assertIn("uncollectable", info)
self.assertIn("candidates", info)
self.assertIn("duration", info)

def test_collect_generation(self):
self.preclean()
Expand Down Expand Up @@ -1450,6 +1530,7 @@ def callback(ignored):
self.assertEqual(x, None)

@gc_threshold(1000, 0, 0)
@unittest.skipIf(Py_GIL_DISABLED, "requires GC generations or increments")
def test_bug1055820d(self):
# Corresponds to temp2d.py in the bug report. This is very much like
# test_bug1055820c, but uses a __del__ method instead of a weakref
Expand Down
1 change: 0 additions & 1 deletion Lib/test/test_symtable.py
Original file line number Diff line number Diff line change
Expand Up @@ -561,7 +561,6 @@ def get_identifiers_recursive(self, st, res):
for ch in st.get_children():
self.get_identifiers_recursive(ch, res)

@unittest.expectedFailure # TODO: RUSTPYTHON; AssertionError: 2 != 1
def test_loopvar_in_only_one_scope(self):
# ensure that the loop variable appears only once in the symtable
comps = [
Expand Down
21 changes: 7 additions & 14 deletions crates/codegen/src/symboltable.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2037,20 +2037,13 @@ impl SymbolTableBuilder {
self.line_index_start(range),
);

// Mark non-generator comprehensions as inlined (PEP 709)
// inline_comp = entry->ste_comprehension && !entry->ste_generator && !ste->ste_can_see_class_scope
// We check is_generator and can_see_class_scope of parent
let parent_can_see_class = self
.tables
.get(self.tables.len().saturating_sub(2))
.map(|t| t.can_see_class_scope)
.unwrap_or(false);
if !is_generator
&& !parent_can_see_class
&& let Some(table) = self.tables.last_mut()
{
table.comp_inlined = true;
}
// PEP 709: inlined comprehensions are not yet implemented in the
// compiler (is_inlined_comprehension_context always returns false),
// so do NOT mark comp_inlined here. Setting it would cause the
// symbol-table analyzer to merge comprehension-local symbols into
// the parent scope, while the compiler still emits a separate code
// object — leading to the merged symbols being missing from the
// comprehension's own symbol table lookup.

// Register the passed argument to the generator function as the name ".0"
self.register_name(".0", SymbolUsage::Parameter, range)?;
Expand Down
18 changes: 8 additions & 10 deletions crates/stdlib/src/_asyncio.rs
Original file line number Diff line number Diff line change
Expand Up @@ -499,7 +499,8 @@ pub(crate) mod _asyncio {
}

fn make_cancelled_error_impl(&self, vm: &VirtualMachine) -> PyBaseExceptionRef {
if let Some(exc) = self.fut_cancelled_exc.read().clone()
// If a saved CancelledError exists, take it (clearing the stored reference)
if let Some(exc) = self.fut_cancelled_exc.write().take()
&& let Ok(exc) = exc.downcast::<PyBaseException>()
{
return exc;
Expand All @@ -508,12 +509,10 @@ pub(crate) mod _asyncio {
let msg = self.fut_cancel_msg.read().clone();
let args = if let Some(m) = msg { vec![m] } else { vec![] };

let exc = match get_cancelled_error_type(vm) {
match get_cancelled_error_type(vm) {
Ok(cancelled_error) => vm.new_exception(cancelled_error, args),
Err(_) => vm.new_runtime_error("cancelled"),
};
*self.fut_cancelled_exc.write() = Some(exc.clone().into());
exc
}
}

fn schedule_callbacks(zelf: &PyRef<Self>, vm: &VirtualMachine) -> PyResult<()> {
Expand Down Expand Up @@ -1309,7 +1308,8 @@ pub(crate) mod _asyncio {
}

fn make_cancelled_error_impl(&self, vm: &VirtualMachine) -> PyBaseExceptionRef {
if let Some(exc) = self.base.fut_cancelled_exc.read().clone()
// If a saved CancelledError exists, take it (clearing the stored reference)
if let Some(exc) = self.base.fut_cancelled_exc.write().take()
&& let Ok(exc) = exc.downcast::<PyBaseException>()
{
return exc;
Expand All @@ -1318,12 +1318,10 @@ pub(crate) mod _asyncio {
let msg = self.base.fut_cancel_msg.read().clone();
let args = if let Some(m) = msg { vec![m] } else { vec![] };

let exc = match get_cancelled_error_type(vm) {
match get_cancelled_error_type(vm) {
Ok(cancelled_error) => vm.new_exception(cancelled_error, args),
Err(_) => vm.new_runtime_error("cancelled"),
};
*self.base.fut_cancelled_exc.write() = Some(exc.clone().into());
exc
}
}

#[pymethod]
Expand Down
28 changes: 18 additions & 10 deletions crates/stdlib/src/socket.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1384,13 +1384,20 @@ mod _socket {

impl DefaultConstructor for PySocket {}

/// Arguments accepted by `socket.socket(...)` construction; all are
/// optional — missing values fall back to `-1` sentinels resolved in
/// `PySocket::_init`.
#[derive(FromArgs)]
pub struct SocketInitArgs {
    /// Address family; defaulting handled in `_init` (`unwrap_or(-1)`).
    #[pyarg(any, optional)]
    family: OptionalArg<i32>,
    /// Socket type; raw identifier because `type` is a Rust keyword.
    #[pyarg(any, optional)]
    r#type: OptionalArg<i32>,
    /// Protocol number; defaulting handled in `_init`.
    #[pyarg(any, optional)]
    proto: OptionalArg<i32>,
    /// Existing file descriptor — on Windows this may also be the bytes
    /// produced by `socket.share()` (see `_init`).
    #[pyarg(any, optional)]
    fileno: OptionalOption<PyObjectRef>,
}

impl Initializer for PySocket {
type Args = (
OptionalArg<i32>,
OptionalArg<i32>,
OptionalArg<i32>,
OptionalOption<PyObjectRef>,
);
type Args = SocketInitArgs;

fn init(zelf: PyRef<Self>, args: Self::Args, vm: &VirtualMachine) -> PyResult<()> {
Self::_init(zelf, args, vm).map_err(|e| e.into_pyexception(vm))
Expand All @@ -1414,13 +1421,14 @@ mod _socket {
impl PySocket {
fn _init(
zelf: PyRef<Self>,
(family, socket_kind, proto, fileno): <Self as Initializer>::Args,
args: <Self as Initializer>::Args,
vm: &VirtualMachine,
) -> Result<(), IoOrPyException> {
let mut family = family.unwrap_or(-1);
let mut socket_kind = socket_kind.unwrap_or(-1);
let mut proto = proto.unwrap_or(-1);
let mut family = args.family.unwrap_or(-1);
let mut socket_kind = args.r#type.unwrap_or(-1);
let mut proto = args.proto.unwrap_or(-1);

let fileno = args.fileno;
let sock;

// On Windows, fileno can be bytes from socket.share() for fromshare()
Expand Down
Loading
Loading
X Tutup