use super::{
    ext::{AsObject, PyRefExact, PyResult},
    payload::PyObjectPayload,
    PyAtomicRef,
};
use crate::object::traverse::{Traverse, TraverseFn};
use crate::object::traverse_object::PyObjVTable;
use crate::{
    builtins::{PyDictRef, PyType, PyTypeRef},
    common::{
        atomic::{OncePtr, PyAtomic, Radium},
        linked_list::{Link, LinkedList, Pointers},
        lock::{PyMutex, PyMutexGuard, PyRwLock},
        refcount::RefCount,
    },
    vm::VirtualMachine,
};
use itertools::Itertools;
use std::{
    any::TypeId,
    borrow::Borrow,
    cell::UnsafeCell,
    fmt,
    marker::PhantomData,
    mem::ManuallyDrop,
    ops::Deref,
    ptr::{self, NonNull},
};

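// `PyObjectRef` type-erases the payload of a Python object: a `PyObject` is a
// `PyInner<Erased>`, and per-payload behavior (drop, debug, traverse) is
// dispatched through a hand-rolled vtable (`PyObjVTable`) rather than a trait
// object, which keeps references a single pointer wide.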
#[derive(Debug)]
pub(super) struct Erased;

pub(super) unsafe fn drop_dealloc_obj<T: PyObjectPayload>(x: *mut PyObject) {
    drop(Box::from_raw(x as *mut PyInner<T>));
}
pub(super) unsafe fn debug_obj<T: PyObjectPayload>(
    x: &PyObject,
    f: &mut fmt::Formatter,
) -> fmt::Result {
    let x = &*(x as *const PyObject as *const PyInner<T>);
    fmt::Debug::fmt(x, f)
}

pub(super) unsafe fn try_trace_obj<T: PyObjectPayload>(x: &PyObject, tracer_fn: &mut TraverseFn) {
    // SAFETY: the caller must ensure that `x`'s payload type is `T`
    let x = &*(x as *const PyObject as *const PyInner<T>);
    let payload = &x.payload;
    payload.try_traverse(tracer_fn)
}

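/// The in-memory layout of every Python object: the reference count, a type id
/// and vtable for the payload, the `__class__` reference, an optional instance
/// dict, the weak-reference list, `__slots__` storage, and finally the Rust
/// payload itself.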
#[repr(C)]
pub(super) struct PyInner<T> {
    pub(super) ref_count: RefCount,
    pub(super) typeid: TypeId,
    pub(super) vtable: &'static PyObjVTable,

    pub(super) typ: PyAtomicRef<PyType>,
    pub(super) dict: Option<InstanceDict>,
    pub(super) weak_list: WeakRefList,
    pub(super) slots: Box<[PyRwLock<Option<PyObjectRef>>]>,

    pub(super) payload: T,
}

impl<T: fmt::Debug> fmt::Debug for PyInner<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[PyObject {:?}]", &self.payload)
    }
}

unsafe impl<T: PyObjectPayload> Traverse for Py<T> {
    fn traverse(&self, tracer_fn: &mut TraverseFn) {
        self.0.traverse(tracer_fn)
    }
}

unsafe impl Traverse for PyObject {
    fn traverse(&self, tracer_fn: &mut TraverseFn) {
        self.0.traverse(tracer_fn)
    }
}

pub(super) struct WeakRefList {
    inner: OncePtr<PyMutex<WeakListInner>>,
}

impl fmt::Debug for WeakRefList {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("WeakRefList").finish_non_exhaustive()
    }
}

struct WeakListInner {
    list: LinkedList<WeakLink, Py<PyWeak>>,
    generic_weakref: Option<NonNull<Py<PyWeak>>>,
    obj: Option<NonNull<PyObject>>,
    // one entry per live PyWeak in the list, plus one for the referent
    // object while it's alive
    ref_count: usize,
}

cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        unsafe impl Send for WeakListInner {}
        unsafe impl Sync for WeakListInner {}
    }
}

impl WeakRefList {
    pub fn new() -> Self {
        WeakRefList {
            inner: OncePtr::new(),
        }
    }

    fn try_lock(&self) -> Option<PyMutexGuard<'_, WeakListInner>> {
        self.inner.get().map(|mu| unsafe { mu.as_ref().lock() })
    }

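    /// Add a new weakref to `obj`'s list. When `cls` is exactly `weakref` and
    /// there is no callback, an existing still-alive "generic" weakref is
    /// reused instead of allocating a new one, matching CPython's caching of
    /// plain weakrefs.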
    fn add(
        &self,
        obj: &PyObject,
        cls: PyTypeRef,
        cls_is_weakref: bool,
        callback: Option<PyObjectRef>,
        dict: Option<PyDictRef>,
    ) -> PyRef<PyWeak> {
        let is_generic = cls_is_weakref && callback.is_none();
        let inner_ptr = self.inner.get_or_init(|| {
            Box::new(PyMutex::new(WeakListInner {
                list: LinkedList::default(),
                generic_weakref: None,
                obj: Some(NonNull::from(obj)),
                ref_count: 1,
            }))
        });
        let mut inner = unsafe { inner_ptr.as_ref().lock() };
        if is_generic {
            if let Some(generic_weakref) = inner.generic_weakref {
                let generic_weakref = unsafe { generic_weakref.as_ref() };
                if generic_weakref.0.ref_count.get() != 0 {
                    return generic_weakref.to_owned();
                }
            }
        }
        let obj = PyWeak {
            pointers: Pointers::new(),
            parent: inner_ptr,
            callback: UnsafeCell::new(callback),
            hash: Radium::new(crate::common::hash::SENTINEL),
        };
        let weak = PyRef::new_ref(obj, cls, dict);
        // SAFETY: the list only holds a borrowed copy of the reference;
        // `weak` keeps ownership of the refcount, and the entry is removed in
        // `PyWeak::drop_inner` before the weakref is deallocated
        inner.list.push_front(unsafe { ptr::read(&weak) });
        inner.ref_count += 1;
        if is_generic {
            inner.generic_weakref = Some(NonNull::from(&*weak));
        }
        weak
    }

    fn clear(&self) {
        let to_dealloc = {
            let ptr = match self.inner.get() {
                Some(ptr) => ptr,
                None => return,
            };
            let mut inner = unsafe { ptr.as_ref().lock() };
            inner.obj = None;
            let mut v = Vec::with_capacity(16);
            loop {
                let inner2 = &mut *inner;
                let iter = inner2
                    .list
                    .drain_filter(|_| true)
                    .filter_map(|wr| {
                        // we don't actually own the reference the list held,
                        // so don't drop it here
                        let wr = ManuallyDrop::new(wr);

                        if Some(NonNull::from(&**wr)) == inner2.generic_weakref {
                            inner2.generic_weakref = None
                        }

                        // only clone weakrefs that are still alive, so their
                        // callbacks can be invoked below
                        (wr.as_object().strong_count() > 0).then(|| (*wr).clone())
                    })
                    .take(16);
                v.extend(iter);
                if v.is_empty() {
                    break;
                }
                // callbacks can run arbitrary Python code, so release the lock
                // while calling them
                PyMutexGuard::unlocked(&mut inner, || {
                    for wr in v.drain(..) {
                        let cb = unsafe { wr.callback.get().replace(None) };
                        if let Some(cb) = cb {
                            crate::vm::thread::with_vm(&cb, |vm| {
                                // errors from the callback are ignored here
                                let _ = cb.call((wr.clone(),), vm);
                            });
                        }
                    }
                })
            }
            inner.ref_count -= 1;
            (inner.ref_count == 0).then_some(ptr)
        };
        if let Some(ptr) = to_dealloc {
            unsafe { WeakRefList::dealloc(ptr) }
        }
    }

    fn count(&self) -> usize {
        self.try_lock()
            // the referent is still alive at this point, so `ref_count`
            // includes its own entry as well as one per weakref
            .map(|inner| inner.ref_count - 1)
            .unwrap_or(0)
    }

    unsafe fn dealloc(ptr: NonNull<PyMutex<WeakListInner>>) {
        drop(Box::from_raw(ptr.as_ptr()));
    }

    fn get_weak_references(&self) -> Vec<PyRef<PyWeak>> {
        let inner = match self.try_lock() {
            Some(inner) => inner,
            None => return vec![],
        };
        let mut v = Vec::with_capacity(inner.ref_count - 1);
        v.extend(inner.iter().map(|wr| wr.to_owned()));
        v
    }
}

impl WeakListInner {
    fn iter(&self) -> impl Iterator<Item = &Py<PyWeak>> {
        self.list.iter().filter(|wr| wr.0.ref_count.get() > 0)
    }
}

impl Default for WeakRefList {
    fn default() -> Self {
        Self::new()
    }
}

struct WeakLink;
unsafe impl Link for WeakLink {
    type Handle = PyRef<PyWeak>;

    type Target = Py<PyWeak>;

    #[inline(always)]
    fn as_raw(handle: &PyRef<PyWeak>) -> NonNull<Self::Target> {
        NonNull::from(&**handle)
    }

    #[inline(always)]
    unsafe fn from_raw(ptr: NonNull<Self::Target>) -> Self::Handle {
        PyRef::from_raw(ptr.as_ptr())
    }

    #[inline(always)]
    unsafe fn pointers(target: NonNull<Self::Target>) -> NonNull<Pointers<Self::Target>> {
        NonNull::new_unchecked(ptr::addr_of_mut!((*target.as_ptr()).0.payload.pointers))
    }
}

#[pyclass(name = "weakref", module = false)]
#[derive(Debug)]
pub struct PyWeak {
    pointers: Pointers<Py<PyWeak>>,
    parent: NonNull<PyMutex<WeakListInner>>,
    // this is treated as part of parent's mutex - you must hold that lock to
    // access it
    callback: UnsafeCell<Option<PyObjectRef>>,
    pub(crate) hash: PyAtomic<crate::common::hash::PyHash>,
}

cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        #[allow(clippy::non_send_fields_in_send_ty)]
        unsafe impl Send for PyWeak {}
        unsafe impl Sync for PyWeak {}
    }
}

impl PyWeak {
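    /// Try to recover a strong reference to the referent; returns `None` once
    /// the object has been (or is being) dropped. `safe_inc` refuses to revive
    /// a refcount that has already hit zero, so a concurrent drop can't race
    /// us into resurrecting a dead object.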
    pub(crate) fn upgrade(&self) -> Option<PyObjectRef> {
        let guard = unsafe { self.parent.as_ref().lock() };
        let obj_ptr = guard.obj?;
        unsafe {
            if !obj_ptr.as_ref().0.ref_count.safe_inc() {
                return None;
            }
            Some(PyObjectRef::from_raw(obj_ptr.as_ptr()))
        }
    }

    pub(crate) fn is_dead(&self) -> bool {
        let guard = unsafe { self.parent.as_ref().lock() };
        guard.obj.is_none()
    }

    fn drop_inner(&self) {
        let dealloc = {
            let mut guard = unsafe { self.parent.as_ref().lock() };
            let offset = memoffset::offset_of!(PyInner<PyWeak>, payload);
            let pyinner = (self as *const Self as usize - offset) as *const PyInner<Self>;
            let node_ptr = unsafe { NonNull::new_unchecked(pyinner as *mut Py<Self>) };
            // the list doesn't own its PyRef<PyWeak>s, so forget the handle it
            // returns rather than dropping the reference a second time
            std::mem::forget(unsafe { guard.list.remove(node_ptr) });
            guard.ref_count -= 1;
            if Some(node_ptr) == guard.generic_weakref {
                guard.generic_weakref = None;
            }
            guard.ref_count == 0
        };
        if dealloc {
            unsafe { WeakRefList::dealloc(self.parent) }
        }
    }
}

impl Drop for PyWeak {
    #[inline(always)]
    fn drop(&mut self) {
        // we do NOT have truly exclusive access here - the weakref list can
        // still observe this node - so route everything through &Self
        let me: &Self = self;
        me.drop_inner();
    }
}

impl Py<PyWeak> {
    #[inline(always)]
    pub fn upgrade(&self) -> Option<PyObjectRef> {
        PyWeak::upgrade(self)
    }
}

#[derive(Debug)]
pub(super) struct InstanceDict {
    pub(super) d: PyRwLock<PyDictRef>,
}

impl From<PyDictRef> for InstanceDict {
    #[inline(always)]
    fn from(d: PyDictRef) -> Self {
        Self::new(d)
    }
}

impl InstanceDict {
    #[inline]
    pub fn new(d: PyDictRef) -> Self {
        Self {
            d: PyRwLock::new(d),
        }
    }

    #[inline]
    pub fn get(&self) -> PyDictRef {
        self.d.read().clone()
    }

    #[inline]
    pub fn set(&self, d: PyDictRef) {
        self.replace(d);
    }

    #[inline]
    pub fn replace(&self, d: PyDictRef) -> PyDictRef {
        std::mem::replace(&mut *self.d.write(), d)
    }
}

impl<T: PyObjectPayload> PyInner<T> {
    fn new(payload: T, typ: PyTypeRef, dict: Option<PyDictRef>) -> Box<Self> {
        let member_count = typ.slots.member_count;
        Box::new(PyInner {
            ref_count: RefCount::new(),
            typeid: TypeId::of::<T>(),
            vtable: PyObjVTable::of::<T>(),
            typ: PyAtomicRef::from(typ),
            dict: dict.map(InstanceDict::new),
            weak_list: WeakRefList::new(),
            payload,
            slots: std::iter::repeat_with(|| PyRwLock::new(None))
                .take(member_count)
                .collect_vec()
                .into_boxed_slice(),
        })
    }
}

#[repr(transparent)]
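/// An owned, type-erased reference to a Python object. A single object can
/// have many references; the count lives in the object itself, and `clone`
/// simply increments it.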
pub struct PyObjectRef {
    ptr: NonNull<PyObject>,
}

impl Clone for PyObjectRef {
    #[inline(always)]
    fn clone(&self) -> Self {
        (**self).to_owned()
    }
}

cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        unsafe impl Send for PyObjectRef {}
        unsafe impl Sync for PyObjectRef {}
    }
}

#[repr(transparent)]
pub struct PyObject(PyInner<Erased>);

impl Deref for PyObjectRef {
    type Target = PyObject;
    #[inline(always)]
    fn deref(&self) -> &PyObject {
        unsafe { self.ptr.as_ref() }
    }
}

impl ToOwned for PyObject {
    type Owned = PyObjectRef;

    #[inline(always)]
    fn to_owned(&self) -> Self::Owned {
        self.0.ref_count.inc();
        PyObjectRef {
            ptr: NonNull::from(self),
        }
    }
}

impl PyObjectRef {
    #[inline(always)]
    pub fn into_raw(self) -> *const PyObject {
        let ptr = self.as_raw();
        std::mem::forget(self);
        ptr
    }

    #[inline(always)]
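    /// # Safety
    /// The pointer must come from an owned reference that the caller gives up
    /// (e.g. one returned by [`PyObjectRef::into_raw`]); this call takes over
    /// that reference count without incrementing it, so it must not be used
    /// to construct more than one `PyObjectRef` from the same reference.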
    pub unsafe fn from_raw(ptr: *const PyObject) -> Self {
        Self {
            ptr: NonNull::new_unchecked(ptr as *mut PyObject),
        }
    }

    #[inline(always)]
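    /// Attempt to downcast this reference to a concrete payload type.
    ///
    /// If the downcast fails, the original reference is returned as `Err` so
    /// another downcast can be attempted without unnecessary cloning.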
    pub fn downcast<T: PyObjectPayload>(self) -> Result<PyRef<T>, Self> {
        if self.payload_is::<T>() {
            Ok(unsafe { self.downcast_unchecked() })
        } else {
            Err(self)
        }
    }

    #[inline(always)]
    pub fn downcast_ref<T: PyObjectPayload>(&self) -> Option<&Py<T>> {
        if self.payload_is::<T>() {
            // SAFETY: just checked that the payload is T, and PyRef is
            // repr(transparent) over a NonNull pointer
            Some(unsafe { &*(self as *const PyObjectRef as *const PyRef<T>) })
        } else {
            None
        }
    }

    #[inline(always)]
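    /// # Safety
    /// `T` must be the exact payload type of this object; casting to the
    /// wrong payload type is undefined behavior.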
    pub unsafe fn downcast_unchecked<T: PyObjectPayload>(self) -> PyRef<T> {
        // PyRef's `ptr` is a NonNull<Py<T>> with the same layout as ours
        let obj = ManuallyDrop::new(self);
        PyRef {
            ptr: obj.ptr.cast(),
        }
    }

    #[inline(always)]
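    /// # Safety
    /// `T` must be the exact payload type of this object.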
    pub unsafe fn downcast_unchecked_ref<T: PyObjectPayload>(&self) -> &Py<T> {
        debug_assert!(self.payload_is::<T>());
        &*(self as *const PyObjectRef as *const PyRef<T>)
    }

    #[inline]
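    /// Attempt to downcast this reference to `T`, succeeding only when the
    /// object's class is exactly `T::class()` (not a subclass).
    ///
    /// If the downcast fails, the original reference is returned as `Err` so
    /// another downcast can be attempted without unnecessary cloning.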
    pub fn downcast_exact<T: PyObjectPayload + crate::PyPayload>(
        self,
        vm: &VirtualMachine,
    ) -> Result<PyRefExact<T>, Self> {
        if self.class().is(T::class(&vm.ctx)) {
            assert!(
                self.payload_is::<T>(),
                "obj.__class__ is T::class() but payload is not T"
            );
            // SAFETY: just asserted that the payload is T
            Ok(unsafe { PyRefExact::new_unchecked(PyRef::from_obj_unchecked(self)) })
        } else {
            Err(self)
        }
    }
}

impl PyObject {
    #[inline(always)]
    fn weak_ref_list(&self) -> Option<&WeakRefList> {
        Some(&self.0.weak_list)
    }

    pub(crate) fn downgrade_with_weakref_typ_opt(
        &self,
        callback: Option<PyObjectRef>,
        // must be weakref or a subclass of it
        typ: PyTypeRef,
    ) -> Option<PyRef<PyWeak>> {
        self.weak_ref_list()
            .map(|wrl| wrl.add(self, typ, true, callback, None))
    }

    pub(crate) fn downgrade_with_typ(
        &self,
        callback: Option<PyObjectRef>,
        typ: PyTypeRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<PyWeak>> {
        let dict = if typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_DICT)
        {
            Some(vm.ctx.new_dict())
        } else {
            None
        };
        let cls_is_weakref = typ.is(vm.ctx.types.weakref_type);
        let wrl = self.weak_ref_list().ok_or_else(|| {
            vm.new_type_error(format!(
                "cannot create weak reference to '{}' object",
                self.class().name()
            ))
        })?;
        Ok(wrl.add(self, typ, cls_is_weakref, callback, dict))
    }

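    /// Create a weak reference to this object, using the plain `weakref` type.
    ///
    /// A minimal usage sketch (assuming a live `vm: &VirtualMachine` and a
    /// weakref-able `obj: PyObjectRef` holding the only strong reference):
    ///
    /// ```ignore
    /// let weak = obj.downgrade(None, vm)?;
    /// assert!(weak.upgrade().is_some()); // obj is still alive
    /// drop(obj);
    /// assert!(weak.upgrade().is_none()); // the referent is gone
    /// ```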
    pub fn downgrade(
        &self,
        callback: Option<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<PyWeak>> {
        self.downgrade_with_typ(callback, vm.ctx.types.weakref_type.to_owned(), vm)
    }

    pub fn get_weak_references(&self) -> Option<Vec<PyRef<PyWeak>>> {
        self.weak_ref_list().map(|wrl| wrl.get_weak_references())
    }

    #[inline(always)]
    pub fn payload_is<T: PyObjectPayload>(&self) -> bool {
        self.0.typeid == TypeId::of::<T>()
    }

    #[inline(always)]
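    /// # Safety
    /// The payload of this object must be of type `T`; prefer the checked
    /// [`PyObject::payload`] where possible.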
    pub unsafe fn payload_unchecked<T: PyObjectPayload>(&self) -> &T {
        // cast to a PyInner<T> first so the compiler computes the payload
        // field's offset for us
        let inner = unsafe { &*(&self.0 as *const PyInner<Erased> as *const PyInner<T>) };
        &inner.payload
    }

    #[inline(always)]
    pub fn payload<T: PyObjectPayload>(&self) -> Option<&T> {
        if self.payload_is::<T>() {
            Some(unsafe { self.payload_unchecked() })
        } else {
            None
        }
    }

    #[inline(always)]
    pub fn class(&self) -> &Py<PyType> {
        self.0.typ.deref()
    }

    pub fn set_class(&self, typ: PyTypeRef, vm: &VirtualMachine) {
        self.0.typ.swap_to_temporary_refs(typ, vm);
    }

    #[inline(always)]
    pub fn payload_if_exact<T: PyObjectPayload + crate::PyPayload>(
        &self,
        vm: &VirtualMachine,
    ) -> Option<&T> {
        if self.class().is(T::class(&vm.ctx)) {
            self.payload()
        } else {
            None
        }
    }

    #[inline(always)]
    fn instance_dict(&self) -> Option<&InstanceDict> {
        self.0.dict.as_ref()
    }

    #[inline(always)]
    pub fn dict(&self) -> Option<PyDictRef> {
        self.instance_dict().map(|d| d.get())
    }

    /// Set the dict field. Returns `Err(dict)` if this object does not have a
    /// dict slot in the first place.
    pub fn set_dict(&self, dict: PyDictRef) -> Result<(), PyDictRef> {
        match self.instance_dict() {
            Some(d) => {
                d.set(dict);
                Ok(())
            }
            None => Err(dict),
        }
    }

    #[inline(always)]
    pub fn payload_if_subclass<T: crate::PyPayload>(&self, vm: &VirtualMachine) -> Option<&T> {
        if self.class().fast_issubclass(T::class(&vm.ctx)) {
            self.payload()
        } else {
            None
        }
    }

    #[inline(always)]
    pub fn downcast_ref<T: PyObjectPayload>(&self) -> Option<&Py<T>> {
        if self.payload_is::<T>() {
            // SAFETY: just checked that the payload is T
            Some(unsafe { self.downcast_unchecked_ref::<T>() })
        } else {
            None
        }
    }

    #[inline(always)]
    pub fn downcast_ref_if_exact<T: PyObjectPayload + crate::PyPayload>(
        &self,
        vm: &VirtualMachine,
    ) -> Option<&Py<T>> {
        self.class()
            .is(T::class(&vm.ctx))
            .then(|| unsafe { self.downcast_unchecked_ref::<T>() })
    }

    #[inline(always)]
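    /// # Safety
    /// The payload of this object must be of type `T`.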
    pub unsafe fn downcast_unchecked_ref<T: PyObjectPayload>(&self) -> &Py<T> {
        debug_assert!(self.payload_is::<T>());
        &*(self as *const PyObject as *const Py<T>)
    }

    #[inline(always)]
    pub fn strong_count(&self) -> usize {
        self.0.ref_count.get()
    }

    #[inline]
    pub fn weak_count(&self) -> Option<usize> {
        self.weak_ref_list().map(|wrl| wrl.count())
    }

    #[inline(always)]
    pub fn as_raw(&self) -> *const PyObject {
        self
    }

    #[inline(always)]
    fn drop_slow_inner(&self) -> Result<(), ()> {
        // the `__del__` path is split into a separate, cold function so the
        // common drop path stays small
        #[inline(never)]
        #[cold]
        fn call_slot_del(
            zelf: &PyObject,
            slot_del: fn(&PyObject, &VirtualMachine) -> PyResult<()>,
        ) -> Result<(), ()> {
            let ret = crate::vm::thread::with_vm(zelf, |vm| {
                zelf.0.ref_count.inc();
                if let Err(e) = slot_del(zelf, vm) {
                    let del_method = zelf.get_class_attr(identifier!(vm, __del__)).unwrap();
                    vm.run_unraisable(e, None, del_method);
                }
                zelf.0.ref_count.dec()
            });
            match ret {
                Some(true) => Ok(()),
                // `__del__` resurrected the object, so don't deallocate it
                Some(false) => Err(()),
                // no VM is available on this thread
                None => {
                    warn!("couldn't run __del__ method for object");
                    Ok(())
                }
            }
        }

        let del = self.class().mro_find_map(|cls| cls.slots.del.load());
        if let Some(slot_del) = del {
            call_slot_del(self, slot_del)?;
        }
        if let Some(wrl) = self.weak_ref_list() {
            wrl.clear();
        }

        Ok(())
    }

    #[inline(never)]
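    /// Can only be called when `ref_count` has dropped to zero; `ptr` must be
    /// valid.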
    unsafe fn drop_slow(ptr: NonNull<PyObject>) {
        if let Err(()) = ptr.as_ref().drop_slow_inner() {
            // abort drop: the object was resurrected by __del__
            return;
        }
        let drop_dealloc = ptr.as_ref().0.vtable.drop_dealloc;
        // invoke the payload-specific destructor through the vtable
        drop_dealloc(ptr.as_ptr())
    }

    /// Mark this object as interned: leak its reference count so it is never
    /// deallocated for the lifetime of the process.
    pub(crate) unsafe fn mark_intern(&self) {
        self.0.ref_count.leak();
    }

    pub(crate) fn is_interned(&self) -> bool {
        self.0.ref_count.is_leaked()
    }

    pub(crate) fn get_slot(&self, offset: usize) -> Option<PyObjectRef> {
        self.0.slots[offset].read().clone()
    }

    pub(crate) fn set_slot(&self, offset: usize, value: Option<PyObjectRef>) {
        *self.0.slots[offset].write() = value;
    }
}

impl Borrow<PyObject> for PyObjectRef {
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        self
    }
}

impl AsRef<PyObject> for PyObjectRef {
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self
    }
}

impl AsRef<PyObject> for PyObject {
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self
    }
}

impl<'a, T: PyObjectPayload> From<&'a Py<T>> for &'a PyObject {
    #[inline(always)]
    fn from(py_ref: &'a Py<T>) -> Self {
        py_ref.as_object()
    }
}

impl Drop for PyObjectRef {
    #[inline]
    fn drop(&mut self) {
        if self.0.ref_count.dec() {
            unsafe { PyObject::drop_slow(self.ptr) }
        }
    }
}

impl fmt::Debug for PyObject {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // SAFETY: the vtable's debug fn always matches this object's payload
        unsafe { (self.0.vtable.debug)(self, f) }
    }
}

impl fmt::Debug for PyObjectRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.as_object().fmt(f)
    }
}

#[repr(transparent)]
pub struct Py<T: PyObjectPayload>(PyInner<T>);

impl<T: PyObjectPayload> Py<T> {
    pub fn downgrade(
        &self,
        callback: Option<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyWeakRef<T>> {
        Ok(PyWeakRef {
            weak: self.as_object().downgrade(callback, vm)?,
            _marker: PhantomData,
        })
    }
}

impl<T: PyObjectPayload> ToOwned for Py<T> {
    type Owned = PyRef<T>;

    #[inline(always)]
    fn to_owned(&self) -> Self::Owned {
        self.0.ref_count.inc();
        PyRef {
            ptr: NonNull::from(self),
        }
    }
}

impl<T: PyObjectPayload> Deref for Py<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0.payload
    }
}

impl<T: PyObjectPayload> Borrow<PyObject> for Py<T> {
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        unsafe { &*(&self.0 as *const PyInner<T> as *const PyObject) }
    }
}

impl<T> std::hash::Hash for Py<T>
where
    T: std::hash::Hash + PyObjectPayload,
{
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.deref().hash(state)
    }
}

impl<T> PartialEq for Py<T>
where
    T: PartialEq + PyObjectPayload,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.deref().eq(other.deref())
    }
}

impl<T> Eq for Py<T> where T: Eq + PyObjectPayload {}

impl<T> AsRef<PyObject> for Py<T>
where
    T: PyObjectPayload,
{
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}

impl<T: PyObjectPayload> fmt::Debug for Py<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        (**self).fmt(f)
    }
}

#[repr(transparent)]
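/// An owned reference to a Python object of known payload type `T`.
///
/// Note that a `PyRef<T>` can only deref to a shared / immutable reference;
/// it is the payload type's responsibility to handle (possibly concurrent)
/// mutability with locks or concurrent data structures if required.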
pub struct PyRef<T: PyObjectPayload> {
    ptr: NonNull<Py<T>>,
}

cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        unsafe impl<T: PyObjectPayload> Send for PyRef<T> {}
        unsafe impl<T: PyObjectPayload> Sync for PyRef<T> {}
    }
}

impl<T: PyObjectPayload> fmt::Debug for PyRef<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        (**self).fmt(f)
    }
}

impl<T: PyObjectPayload> Drop for PyRef<T> {
    #[inline]
    fn drop(&mut self) {
        if self.0.ref_count.dec() {
            unsafe { PyObject::drop_slow(self.ptr.cast::<PyObject>()) }
        }
    }
}

impl<T: PyObjectPayload> Clone for PyRef<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        (**self).to_owned()
    }
}

impl<T: PyObjectPayload> PyRef<T> {
    #[inline(always)]
    pub(crate) unsafe fn from_raw(raw: *const Py<T>) -> Self {
        Self {
            ptr: NonNull::new_unchecked(raw as *mut _),
        }
    }

    #[inline(always)]
    // SAFETY requirement: `obj`'s payload must be of type `T`
    unsafe fn from_obj_unchecked(obj: PyObjectRef) -> Self {
        debug_assert!(obj.payload_is::<T>());
        let obj = ManuallyDrop::new(obj);
        Self {
            ptr: obj.ptr.cast(),
        }
    }

    #[inline(always)]
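    /// Allocate a new object on the Python heap from `payload`, with class
    /// `typ` and an optional instance dict.
    ///
    /// A minimal sketch; the `PyFloat` payload and `ctx` handle here are
    /// assumptions for illustration:
    ///
    /// ```ignore
    /// let obj = PyRef::new_ref(PyFloat::from(1.5), ctx.types.float_type.to_owned(), None);
    /// assert_eq!(obj.as_object().strong_count(), 1);
    /// ```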
    pub fn new_ref(payload: T, typ: crate::builtins::PyTypeRef, dict: Option<PyDictRef>) -> Self {
        let inner = Box::into_raw(PyInner::new(payload, typ, dict));
        Self {
            ptr: unsafe { NonNull::new_unchecked(inner.cast::<Py<T>>()) },
        }
    }

    pub fn leak(pyref: Self) -> &'static Py<T> {
        let ptr = pyref.ptr;
        std::mem::forget(pyref);
        unsafe { &*ptr.as_ptr() }
    }
}

impl<T> Borrow<PyObject> for PyRef<T>
where
    T: PyObjectPayload,
{
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        (**self).as_object()
    }
}

impl<T> AsRef<PyObject> for PyRef<T>
where
    T: PyObjectPayload,
{
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}

impl<T> From<PyRef<T>> for PyObjectRef
where
    T: PyObjectPayload,
{
    #[inline]
    fn from(value: PyRef<T>) -> Self {
        let me = ManuallyDrop::new(value);
        PyObjectRef { ptr: me.ptr.cast() }
    }
}

impl<T> Borrow<Py<T>> for PyRef<T>
where
    T: PyObjectPayload,
{
    #[inline(always)]
    fn borrow(&self) -> &Py<T> {
        self
    }
}

impl<T> AsRef<Py<T>> for PyRef<T>
where
    T: PyObjectPayload,
{
    #[inline(always)]
    fn as_ref(&self) -> &Py<T> {
        self
    }
}

impl<T> Deref for PyRef<T>
where
    T: PyObjectPayload,
{
    type Target = Py<T>;

    #[inline(always)]
    fn deref(&self) -> &Py<T> {
        unsafe { self.ptr.as_ref() }
    }
}

impl<T> std::hash::Hash for PyRef<T>
where
    T: std::hash::Hash + PyObjectPayload,
{
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.deref().hash(state)
    }
}

impl<T> PartialEq for PyRef<T>
where
    T: PartialEq + PyObjectPayload,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.deref().eq(other.deref())
    }
}

impl<T> Eq for PyRef<T> where T: Eq + PyObjectPayload {}

#[repr(transparent)]
pub struct PyWeakRef<T: PyObjectPayload> {
    weak: PyRef<PyWeak>,
    _marker: PhantomData<T>,
}

impl<T: PyObjectPayload> PyWeakRef<T> {
    pub fn upgrade(&self) -> Option<PyRef<T>> {
        self.weak
            .upgrade()
            // SAFETY: PyWeakRef<T> is only ever created from a Py<T>, so the
            // referent's payload is T
            .map(|obj| unsafe { PyRef::from_obj_unchecked(obj) })
    }
}

macro_rules! partially_init {
    (
        $ty:path {$($init_field:ident: $init_value:expr),*$(,)?},
        Uninit { $($uninit_field:ident),*$(,)? }$(,)?
    ) => {{
        // check that all the fields are there, but *don't* actually run it
        #[allow(clippy::diverging_sub_expression)]
        if false {
            #[allow(invalid_value, dead_code, unreachable_code)]
            let _ = {$ty {
                $($init_field: $init_value,)*
                $($uninit_field: unreachable!(),)*
            }};
        }
        let mut m = ::std::mem::MaybeUninit::<$ty>::uninit();
        #[allow(unused_unsafe)]
        unsafe {
            $(::std::ptr::write(&mut (*m.as_mut_ptr()).$init_field, $init_value);)*
        }
        m
    }};
}
1176
1177pub(crate) fn init_type_hierarchy() -> (PyTypeRef, PyTypeRef, PyTypeRef) {
1178 use crate::{builtins::object, class::PyClassImpl};
1179 use std::mem::MaybeUninit;
1180
1181 let (type_type, object_type) = {
1186 static_assertions::assert_eq_size!(MaybeUninit<PyInner<PyType>>, PyInner<PyType>);
1189 static_assertions::assert_eq_align!(MaybeUninit<PyInner<PyType>>, PyInner<PyType>);
1190
1191 let type_payload = PyType {
1192 base: None,
1193 bases: PyRwLock::default(),
1194 mro: PyRwLock::default(),
1195 subclasses: PyRwLock::default(),
1196 attributes: PyRwLock::new(Default::default()),
1197 slots: PyType::make_slots(),
1198 heaptype_ext: None,
1199 };
1200 let object_payload = PyType {
1201 base: None,
1202 bases: PyRwLock::default(),
1203 mro: PyRwLock::default(),
1204 subclasses: PyRwLock::default(),
1205 attributes: PyRwLock::new(Default::default()),
1206 slots: object::PyBaseObject::make_slots(),
1207 heaptype_ext: None,
1208 };
1209 let type_type_ptr = Box::into_raw(Box::new(partially_init!(
1210 PyInner::<PyType> {
1211 ref_count: RefCount::new(),
1212 typeid: TypeId::of::<PyType>(),
1213 vtable: PyObjVTable::of::<PyType>(),
1214 dict: None,
1215 weak_list: WeakRefList::new(),
1216 payload: type_payload,
1217 slots: Box::new([]),
1218 },
1219 Uninit { typ }
1220 )));
1221 let object_type_ptr = Box::into_raw(Box::new(partially_init!(
1222 PyInner::<PyType> {
1223 ref_count: RefCount::new(),
1224 typeid: TypeId::of::<PyType>(),
1225 vtable: PyObjVTable::of::<PyType>(),
1226 dict: None,
1227 weak_list: WeakRefList::new(),
1228 payload: object_payload,
1229 slots: Box::new([]),
1230 },
1231 Uninit { typ },
1232 )));
1233
1234 let object_type_ptr = object_type_ptr as *mut PyInner<PyType>;
1235 let type_type_ptr = type_type_ptr as *mut PyInner<PyType>;
1236
1237 unsafe {
1238 (*type_type_ptr).ref_count.inc();
1239 let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
1240 ptr::write(&mut (*object_type_ptr).typ, PyAtomicRef::from(type_type));
1241 (*type_type_ptr).ref_count.inc();
1242 let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
1243 ptr::write(&mut (*type_type_ptr).typ, PyAtomicRef::from(type_type));
1244
1245 let object_type = PyTypeRef::from_raw(object_type_ptr.cast());
1246
1247 (*type_type_ptr).payload.mro = PyRwLock::new(vec![object_type.clone()]);
1248 (*type_type_ptr).payload.bases = PyRwLock::new(vec![object_type.clone()]);
1249 (*type_type_ptr).payload.base = Some(object_type.clone());
1250
1251 let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
1252
1253 (type_type, object_type)
1254 }
1255 };
1256
1257 let weakref_type = PyType {
1258 base: Some(object_type.clone()),
1259 bases: PyRwLock::new(vec![object_type.clone()]),
1260 mro: PyRwLock::new(vec![object_type.clone()]),
1261 subclasses: PyRwLock::default(),
1262 attributes: PyRwLock::default(),
1263 slots: PyWeak::make_slots(),
1264 heaptype_ext: None,
1265 };
1266 let weakref_type = PyRef::new_ref(weakref_type, type_type.clone(), None);
1267
1268 object_type.subclasses.write().push(
1269 type_type
1270 .as_object()
1271 .downgrade_with_weakref_typ_opt(None, weakref_type.clone())
1272 .unwrap(),
1273 );
1274
1275 object_type.subclasses.write().push(
1276 weakref_type
1277 .as_object()
1278 .downgrade_with_weakref_typ_opt(None, weakref_type.clone())
1279 .unwrap(),
1280 );
1281
1282 (type_type, object_type, weakref_type)
1283}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn miri_test_type_initialization() {
        let _ = init_type_hierarchy();
    }

    #[test]
    fn miri_test_drop() {
        let ctx = crate::Context::genesis();
        let obj = ctx.new_bytes(b"dfghjkl".to_vec());
        drop(obj);
    }
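
    // A refcount sanity sketch under the same harness assumptions as
    // `miri_test_drop` above: cloning a reference bumps the strong count,
    // and dropping the clone restores it.
    #[test]
    fn miri_test_clone_refcount() {
        let ctx = crate::Context::genesis();
        let obj = ctx.new_bytes(b"dfghjkl".to_vec());
        let base = obj.as_object().strong_count();
        let obj2 = obj.clone();
        assert_eq!(obj.as_object().strong_count(), base + 1);
        drop(obj2);
        assert_eq!(obj.as_object().strong_count(), base);
    }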
}