Upload 8 files
Browse files- cpython_chunk_0.py +0 -0
- cpython_chunk_10.py +1490 -0
- cpython_chunk_11.py +0 -0
- cpython_chunk_13.py +0 -0
- cpython_chunk_15.py +0 -0
- cpython_chunk_18.py +0 -0
- cpython_chunk_2.py +0 -0
- cpython_chunk_24.py +0 -0
cpython_chunk_0.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
cpython_chunk_10.py
ADDED
|
@@ -0,0 +1,1490 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Auto-generated from cpython_chunk_10.txt
|
| 2 |
+
|
| 3 |
+
TEXT_DATA = r"""
|
| 4 |
+
# The bytecode interpreter
|
| 5 |
+
This document describes the workings and implementation of the bytecode
|
| 6 |
+
interpreter, the part of python that executes compiled Python code. Its
|
| 7 |
+
entry point is in [Python/ceval.c](../Python/ceval.c).
|
| 8 |
+
At a high level, the interpreter consists of a loop that iterates over the
|
| 9 |
+
bytecode instructions, executing each of them via a switch statement that
|
| 10 |
+
has a case implementing each opcode. This switch statement is generated
|
| 11 |
+
from the instruction definitions in [Python/bytecodes.c](../Python/bytecodes.c)
|
| 12 |
+
which are written in [a DSL](../Tools/cases_generator/interpreter_definition.md)
|
| 13 |
+
developed for this purpose.
|
| 14 |
+
Recall that the [Python Compiler](compiler.md) produces a [`CodeObject`](code_objects.md),
|
| 15 |
+
which contains the bytecode instructions along with static data that is required to execute them,
|
| 16 |
+
such as the consts list, variable names,
|
| 17 |
+
[exception table](exception_handling.md#format-of-the-exception-table), and so on.
|
| 18 |
+
When the interpreter's
|
| 19 |
+
[`PyEval_EvalCode()`](https://docs.python.org/3.14/c-api/veryhigh.html#c.PyEval_EvalCode)
|
| 20 |
+
function is called to execute a `CodeObject`, it constructs a [`Frame`](frames.md) and calls
|
| 21 |
+
[`_PyEval_EvalFrame()`](https://docs.python.org/3.14/c-api/veryhigh.html#c.PyEval_EvalCode)
|
| 22 |
+
to execute the code object in this frame. The frame holds the dynamic state of the
|
| 23 |
+
`CodeObject`'s execution, including the instruction pointer, the globals and builtins.
|
| 24 |
+
It also has a reference to the `CodeObject` itself.
|
| 25 |
+
In addition to the frame, `_PyEval_EvalFrame()` also receives a
|
| 26 |
+
[`Thread State`](https://docs.python.org/3/c-api/init.html#c.PyThreadState)
|
| 27 |
+
object, `tstate`, which includes things like the exception state and the
|
| 28 |
+
recursion depth. The thread state also provides access to the per-interpreter
|
| 29 |
+
state (`tstate->interp`), which has a pointer to the per-runtime (that is,
|
| 30 |
+
truly global) state (`tstate->interp->runtime`).
|
| 31 |
+
Finally, `_PyEval_EvalFrame()` receives an integer argument `throwflag`
|
| 32 |
+
which, when nonzero, indicates that the interpreter should just raise the current exception
|
| 33 |
+
(this is used in the implementation of
|
| 34 |
+
[`gen.throw`](https://docs.python.org/3.14/reference/expressions.html#generator.throw)).
|
| 35 |
+
By default, [`_PyEval_EvalFrame()`](https://docs.python.org/3.14/c-api/veryhigh.html#c.PyEval_EvalCode)
|
| 36 |
+
simply calls `_PyEval_EvalFrameDefault()` to execute the frame. However, as per
|
| 37 |
+
[`PEP 523`](https://peps.python.org/pep-0523/) this is configurable by setting
|
| 38 |
+
`interp->eval_frame`. In the following, we describe the default function,
|
| 39 |
+
`_PyEval_EvalFrameDefault()`.
|
| 40 |
+
## Instruction decoding
|
| 41 |
+
The first task of the interpreter is to decode the bytecode instructions.
|
| 42 |
+
Bytecode is stored as an array of 16-bit code units (`_Py_CODEUNIT`).
|
| 43 |
+
Each code unit contains an 8-bit `opcode` and an 8-bit argument (`oparg`), both unsigned.
|
| 44 |
+
In order to make the bytecode format independent of the machine byte order when stored on disk,
|
| 45 |
+
`opcode` is always the first byte and `oparg` is always the second byte.
|
| 46 |
+
Macros are used to extract the `opcode` and `oparg` from a code unit
|
| 47 |
+
(`_Py_OPCODE(word)` and `_Py_OPARG(word)`).
|
| 48 |
+
Some instructions (for example, `NOP` or `POP_TOP`) have no argument -- in this case
|
| 49 |
+
we ignore `oparg`.
|
| 50 |
+
A simplified version of the interpreter's main loop looks like this:
|
| 51 |
+
```c
|
| 52 |
+
_Py_CODEUNIT *first_instr = code->co_code_adaptive;
|
| 53 |
+
_Py_CODEUNIT *next_instr = first_instr;
|
| 54 |
+
while (1) {
|
| 55 |
+
_Py_CODEUNIT word = *next_instr++;
|
| 56 |
+
unsigned char opcode = _Py_OPCODE(word);
|
| 57 |
+
unsigned int oparg = _Py_OPARG(word);
|
| 58 |
+
switch (opcode) {
|
| 59 |
+
// ... A case for each opcode ...
|
| 60 |
+
}
|
| 61 |
+
}
|
| 62 |
+
```
|
| 63 |
+
This loop iterates over the instructions, decoding each into its `opcode`
|
| 64 |
+
and `oparg`, and then executes the switch case that implements this `opcode`.
|
| 65 |
+
The instruction format supports 256 different opcodes, which is sufficient.
|
| 66 |
+
However, it also limits `oparg` to 8-bit values, which is too restrictive.
|
| 67 |
+
To overcome this, the `EXTENDED_ARG` opcode allows us to prefix any instruction
|
| 68 |
+
with one or more additional data bytes, which combine into a larger oparg.
|
| 69 |
+
For example, this sequence of code units:
|
| 70 |
+
EXTENDED_ARG 1
|
| 71 |
+
EXTENDED_ARG 0
|
| 72 |
+
LOAD_CONST 2
|
| 73 |
+
would set `opcode` to `LOAD_CONST` and `oparg` to `65538` (that is, `0x1_00_02`).
|
| 74 |
+
The compiler should limit itself to at most three `EXTENDED_ARG` prefixes, to allow the
|
| 75 |
+
resulting `oparg` to fit in 32 bits, but the interpreter does not check this.
|
| 76 |
+
In the following, a `code unit` is always two bytes, while an `instruction` is a
|
| 77 |
+
sequence of code units consisting of zero to three `EXTENDED_ARG` opcodes followed by
|
| 78 |
+
a primary opcode.
|
| 79 |
+
The following loop, to be inserted just above the `switch` statement, will make the above
|
| 80 |
+
snippet decode a complete instruction:
|
| 81 |
+
```c
|
| 82 |
+
while (opcode == EXTENDED_ARG) {
|
| 83 |
+
word = *next_instr++;
|
| 84 |
+
opcode = _Py_OPCODE(word);
|
| 85 |
+
oparg = (oparg << 8) | _Py_OPARG(word);
|
| 86 |
+
}
|
| 87 |
+
```
|
| 88 |
+
For various reasons we'll get to later (mostly efficiency, given that `EXTENDED_ARG`
|
| 89 |
+
is rare) the actual code is different.
|
| 90 |
+
## Jumps
|
| 91 |
+
Note that when the `switch` statement is reached, `next_instr` (the "instruction offset")
|
| 92 |
+
already points to the next instruction.
|
| 93 |
+
Thus, jump instructions can be implemented by manipulating `next_instr`:
|
| 94 |
+
- A jump forward (`JUMP_FORWARD`) sets `next_instr += oparg`.
|
| 95 |
+
- A jump backward (`JUMP_BACKWARD`) sets `next_instr -= oparg`.
|
| 96 |
+
## Inline cache entries
|
| 97 |
+
Some (specialized or specializable) instructions have an associated "inline cache".
|
| 98 |
+
The inline cache consists of one or more two-byte entries included in the bytecode
|
| 99 |
+
array as additional words following the `opcode`/`oparg` pair.
|
| 100 |
+
The size of the inline cache for a particular instruction is fixed by its `opcode`.
|
| 101 |
+
Moreover, the inline cache size for all instructions in a
|
| 102 |
+
[family of specialized/specializable instructions](#Specialization)
|
| 103 |
+
(for example, `LOAD_ATTR`, `LOAD_ATTR_SLOT`, `LOAD_ATTR_MODULE`) must all be
|
| 104 |
+
the same. Cache entries are reserved by the compiler and initialized with zeros.
|
| 105 |
+
Although they are represented by code units, cache entries do not conform to the
|
| 106 |
+
`opcode` / `oparg` format.
|
| 107 |
+
If an instruction has an inline cache, the layout of its cache is described in
|
| 108 |
+
the instruction's definition in [`Python/bytecodes.c`](../Python/bytecodes.c).
|
| 109 |
+
The structs defined in [`pycore_code.h`](../Include/internal/pycore_code.h)
|
| 110 |
+
allow us to access the cache by casting `next_instr` to a pointer to the relevant
|
| 111 |
+
`struct`. The size of such a `struct` must be independent of the machine
|
| 112 |
+
architecture, word size and alignment requirements. For a 32-bit field, the
|
| 113 |
+
`struct` should use `_Py_CODEUNIT field[2]`.
|
| 114 |
+
The instruction implementation is responsible for advancing `next_instr` past the inline cache.
|
| 115 |
+
For example, if an instruction's inline cache is four bytes (that is, two code units) in size,
|
| 116 |
+
the code for the instruction must contain `next_instr += 2;`.
|
| 117 |
+
This is equivalent to a relative forward jump by that many code units.
|
| 118 |
+
(In the interpreter definition DSL, this is coded as `JUMPBY(n)`, where `n` is the number
|
| 119 |
+
of code units to jump, typically given as a named constant.)
|
| 120 |
+
Serializing non-zero cache entries would present a problem because the serialization
|
| 121 |
+
(:mod:`marshal`) format must be independent of the machine byte order.
|
| 122 |
+
More information about the use of inline caches can be found in
|
| 123 |
+
[PEP 659](https://peps.python.org/pep-0659/#ancillary-data).
|
| 124 |
+
## The evaluation stack
|
| 125 |
+
Most instructions read or write some data in the form of object references (`PyObject *`).
|
| 126 |
+
The CPython bytecode interpreter is a stack machine, meaning that its instructions operate
|
| 127 |
+
by pushing data onto and popping it off the stack.
|
| 128 |
+
The stack forms part of the frame for the code object. Its maximum depth is calculated
|
| 129 |
+
by the compiler and stored in the `co_stacksize` field of the code object, so that the
|
| 130 |
+
stack can be pre-allocated as a contiguous array of `PyObject*` pointers, when the frame
|
| 131 |
+
is created.
|
| 132 |
+
The stack effects of each instruction are also exposed through the
|
| 133 |
+
[opcode metadata](../Include/internal/pycore_opcode_metadata.h) through two
|
| 134 |
+
functions that report how many stack elements each instruction consumes,
|
| 135 |
+
and how many it produces (`_PyOpcode_num_popped` and `_PyOpcode_num_pushed`).
|
| 136 |
+
For example, the `BINARY_OP` instruction pops two objects from the stack and pushes the
|
| 137 |
+
result back onto the stack.
|
| 138 |
+
The stack grows up in memory; the operation `PUSH(x)` is equivalent to `*stack_pointer++ = x`,
|
| 139 |
+
whereas `x = POP()` means `x = *--stack_pointer`.
|
| 140 |
+
Overflow and underflow checks are active in debug mode, but are otherwise optimized away.
|
| 141 |
+
At any point during execution, the stack level is knowable based on the instruction pointer
|
| 142 |
+
alone, and some properties of each item on the stack are also known.
|
| 143 |
+
In particular, only a few instructions may push a `NULL` onto the stack, and the positions
|
| 144 |
+
that may be `NULL` are known.
|
| 145 |
+
A few other instructions (`GET_ITER`, `FOR_ITER`) push or pop an object that is known to
|
| 146 |
+
be an iterator.
|
| 147 |
+
Instruction sequences that do not allow statically knowing the stack depth are deemed illegal;
|
| 148 |
+
the bytecode compiler never generates such sequences.
|
| 149 |
+
For example, the following sequence is illegal, because it keeps pushing items on the stack:
|
| 150 |
+
LOAD_FAST 0
|
| 151 |
+
JUMP_BACKWARD 2
|
| 152 |
+
> [!NOTE]
|
| 153 |
+
> Do not confuse the evaluation stack with the call stack, which is used to implement calling
|
| 154 |
+
> and returning from functions.
|
| 155 |
+
## Error handling
|
| 156 |
+
When the implementation of an opcode raises an exception, it jumps to the
|
| 157 |
+
`exception_unwind` label in [Python/ceval.c](../Python/ceval.c).
|
| 158 |
+
The exception is then handled as described in the
|
| 159 |
+
[`exception handling documentation`](exception_handling.md#handling-exceptions).
|
| 160 |
+
## Python-to-Python calls
|
| 161 |
+
The `_PyEval_EvalFrameDefault()` function is recursive, because sometimes
|
| 162 |
+
the interpreter calls some C function that calls back into the interpreter.
|
| 163 |
+
In 3.10 and before, this was the case even when a Python function called
|
| 164 |
+
another Python function:
|
| 165 |
+
The `CALL` opcode would call the `tp_call` dispatch function of the
|
| 166 |
+
callee, which would extract the code object, create a new frame for the call
|
| 167 |
+
stack, and then call back into the interpreter. This approach is very general
|
| 168 |
+
but consumes several C stack frames for each nested Python call, thereby
|
| 169 |
+
increasing the risk of an (unrecoverable) C stack overflow.
|
| 170 |
+
Since 3.11, the `CALL` instruction special-cases function objects to "inline"
|
| 171 |
+
the call. When a call gets inlined, a new frame gets pushed onto the call
|
| 172 |
+
stack and the interpreter "jumps" to the start of the callee's bytecode.
|
| 173 |
+
When an inlined callee executes a `RETURN_VALUE` instruction, the frame is
|
| 174 |
+
popped off the call stack and the interpreter returns to its caller,
|
| 175 |
+
by popping a frame off the call stack and "jumping" to the return address.
|
| 176 |
+
There is a flag in the frame (`frame->is_entry`) that indicates whether
|
| 177 |
+
the frame was inlined (set if it wasn't).
|
| 178 |
+
If `RETURN_VALUE` finds this flag set, it performs the usual cleanup and
|
| 179 |
+
returns from `_PyEval_EvalFrameDefault()` altogether, to a C caller.
|
| 180 |
+
A similar check is performed when an unhandled exception occurs.
|
| 181 |
+
## The call stack
|
| 182 |
+
Up through 3.10, the call stack was implemented as a singly-linked list of
|
| 183 |
+
[frame objects](frames.md). This was expensive because each call would require a
|
| 184 |
+
heap allocation for the stack frame.
|
| 185 |
+
Since 3.11, frames are no longer fully-fledged objects. Instead, a leaner internal
|
| 186 |
+
`_PyInterpreterFrame` structure is used. Most frames are allocated contiguously in a
|
| 187 |
+
per-thread stack (see `_PyThreadState_PushFrame` in [Python/pystate.c](../Python/pystate.c)),
|
| 188 |
+
which improves memory locality and reduces overhead.
|
| 189 |
+
If the current `datastack_chunk` has enough space (`_PyThreadState_HasStackSpace`)
|
| 190 |
+
then the lightweight `_PyFrame_PushUnchecked` can be used instead of `_PyThreadState_PushFrame`.
|
| 191 |
+
Sometimes an actual `PyFrameObject` is needed, such as when Python code calls
|
| 192 |
+
`sys._getframe()` or an extension module calls
|
| 193 |
+
[`PyEval_GetFrame()`](https://docs.python.org/3/c-api/reflection.html#c.PyEval_GetFrame).
|
| 194 |
+
In this case we allocate a proper `PyFrameObject` and initialize it from the
|
| 195 |
+
`_PyInterpreterFrame`.
|
| 196 |
+
Things get more complicated when generators are involved, since those do not
|
| 197 |
+
follow the push/pop model. This includes async functions, which are based on
|
| 198 |
+
the same mechanism. A generator object has space for a `_PyInterpreterFrame`
|
| 199 |
+
structure, including the variable-size part (used for locals and the eval stack).
|
| 200 |
+
When a generator (or async) function is first called, a special opcode
|
| 201 |
+
`RETURN_GENERATOR` is executed, which is responsible for creating the
|
| 202 |
+
generator object. The generator object's `_PyInterpreterFrame` is initialized
|
| 203 |
+
with a copy of the current stack frame. The current stack frame is then popped
|
| 204 |
+
off the frame stack and the generator object is returned.
|
| 205 |
+
(Details differ depending on the `is_entry` flag.)
|
| 206 |
+
When the generator is resumed, the interpreter pushes its `_PyInterpreterFrame`
|
| 207 |
+
onto the frame stack and resumes execution.
|
| 208 |
+
See also the [generators](generators.md) section.
|
| 209 |
+
<!--
|
| 210 |
+
## All sorts of variables
|
| 211 |
+
The bytecode compiler determines the scope in which each variable name is defined,
|
| 212 |
+
and generates instructions accordingly. For example, loading a local variable
|
| 213 |
+
onto the stack is done using `LOAD_FAST`, while loading a global is done using
|
| 214 |
+
`LOAD_GLOBAL`.
|
| 215 |
+
The key types of variables are:
|
| 216 |
+
- fast locals: used in functions
|
| 217 |
+
- (slow or regular) locals: used in classes and at the top level
|
| 218 |
+
- globals and builtins: the compiler cannot distinguish between globals and
|
| 219 |
+
builtins (though at runtime, the specializing interpreter can)
|
| 220 |
+
- cells: used for nonlocal references
|
| 221 |
+
(TODO: Write the rest of this section. Alas, the author got distracted and won't have time to continue this for a while.)
|
| 222 |
+
-->
|
| 223 |
+
<!--
|
| 224 |
+
Other topics
|
| 225 |
+
------------
|
| 226 |
+
(TODO: Each of the following probably deserves its own section.)
|
| 227 |
+
- co_consts, co_names, co_varnames, and their ilk
|
| 228 |
+
- How calls work (how args are transferred, return, exceptions)
|
| 229 |
+
- Eval breaker (interrupts, GIL)
|
| 230 |
+
- Tracing
|
| 231 |
+
- Setting the current lineno (debugger-induced jumps)
|
| 232 |
+
- Specialization, inline caches etc.
|
| 233 |
+
-->
|
| 234 |
+
## Introducing a new bytecode instruction
|
| 235 |
+
It is occasionally necessary to add a new opcode in order to implement
|
| 236 |
+
a new feature or change the way that existing features are compiled.
|
| 237 |
+
This section describes the changes required to do this.
|
| 238 |
+
First, you must choose a name for the bytecode, implement it in
|
| 239 |
+
[`Python/bytecodes.c`](../Python/bytecodes.c) and add a documentation
|
| 240 |
+
entry in [`Doc/library/dis.rst`](../Doc/library/dis.rst).
|
| 241 |
+
Then run `make regen-cases` to assign a number for it (see
|
| 242 |
+
[`Include/opcode_ids.h`](../Include/opcode_ids.h)) and regenerate a
|
| 243 |
+
number of files with the actual implementation of the bytecode in
|
| 244 |
+
[`Python/generated_cases.c.h`](../Python/generated_cases.c.h) and
|
| 245 |
+
metadata about it in additional files.
|
| 246 |
+
With a new bytecode you must also change what is called the "magic number" for
|
| 247 |
+
.pyc files: bump the value of the variable `MAGIC_NUMBER` in
|
| 248 |
+
[`Lib/importlib/_bootstrap_external.py`](../Lib/importlib/_bootstrap_external.py).
|
| 249 |
+
Changing this number will cause all .pyc files with the old `MAGIC_NUMBER`
|
| 250 |
+
to be recompiled by the interpreter on import. Whenever `MAGIC_NUMBER` is
|
| 251 |
+
changed, the ranges in the `magic_values` array in
|
| 252 |
+
[`PC/launcher.c`](../PC/launcher.c) may also need to be updated. Changes to
|
| 253 |
+
[`Lib/importlib/_bootstrap_external.py`](../Lib/importlib/_bootstrap_external.py)
|
| 254 |
+
will take effect only after running `make regen-importlib`.
|
| 255 |
+
> [!NOTE]
|
| 256 |
+
> Running `make regen-importlib` before adding the new bytecode target to
|
| 257 |
+
> [`Python/bytecodes.c`](../Python/bytecodes.c)
|
| 258 |
+
> (followed by `make regen-cases`) will result in an error. You should only run
|
| 259 |
+
> `make regen-importlib` after the new bytecode target has been added.
|
| 260 |
+
> [!NOTE]
|
| 261 |
+
> On Windows, running the `./build.bat` script will automatically
|
| 262 |
+
> regenerate the required files without requiring additional arguments.
|
| 263 |
+
Finally, you need to introduce the use of the new bytecode. Update
|
| 264 |
+
[`Python/codegen.c`](../Python/codegen.c) to emit code with this bytecode.
|
| 265 |
+
Optimizations in [`Python/flowgraph.c`](../Python/flowgraph.c) may also
|
| 266 |
+
need to be updated. If the new opcode affects a control flow or the block
|
| 267 |
+
stack, you may have to update the `frame_setlineno()` function in
|
| 268 |
+
[`Objects/frameobject.c`](../Objects/frameobject.c). It may also be necessary
|
| 269 |
+
to update [`Lib/dis.py`](../Lib/dis.py) if the new opcode interprets its
|
| 270 |
+
argument in a special way (like `FORMAT_VALUE` or `MAKE_FUNCTION`).
|
| 271 |
+
If you make a change here that can affect the output of bytecode that
|
| 272 |
+
is already in existence and you do not change the magic number, make
|
| 273 |
+
sure to delete your old .py(c|o) files! Even though you will end up changing
|
| 274 |
+
the magic number if you change the bytecode, while you are debugging your work
|
| 275 |
+
you may be changing the bytecode output without constantly bumping up the
|
| 276 |
+
magic number. This can leave you with stale .pyc files that will not be
|
| 277 |
+
recreated.
|
| 278 |
+
Running `find . -name '*.py[co]' -exec rm -f '{}' +` should delete all .pyc
|
| 279 |
+
files you have, forcing new ones to be created and thus allow you test out your
|
| 280 |
+
new bytecode properly. Run `make regen-importlib` for updating the
|
| 281 |
+
bytecode of frozen importlib files. You have to run `make` again after this
|
| 282 |
+
to recompile the generated C files.
|
| 283 |
+
## Specialization
|
| 284 |
+
Bytecode specialization, which was introduced in
|
| 285 |
+
[PEP 659](https://peps.python.org/pep-0659/), speeds up program execution by
|
| 286 |
+
rewriting instructions based on runtime information. This is done by replacing
|
| 287 |
+
a generic instruction with a faster version that works for the case that this
|
| 288 |
+
program encounters. Each specializable instruction is responsible for rewriting
|
| 289 |
+
itself, using its [inline caches](#inline-cache-entries) for
|
| 290 |
+
bookkeeping.
|
| 291 |
+
When an adaptive instruction executes, it may attempt to specialize itself,
|
| 292 |
+
depending on the argument and the contents of its cache. This is done
|
| 293 |
+
by calling one of the `_Py_Specialize_XXX` functions in
|
| 294 |
+
[`Python/specialize.c`](../Python/specialize.c).
|
| 295 |
+
The specialized instructions are responsible for checking that the special-case
|
| 296 |
+
assumptions still apply, and de-optimizing back to the generic version if not.
|
| 297 |
+
## Families of instructions
|
| 298 |
+
A *family* of instructions consists of an adaptive instruction along with the
|
| 299 |
+
specialized instructions that it can be replaced by.
|
| 300 |
+
It has the following fundamental properties:
|
| 301 |
+
* It corresponds to a single instruction in the code
|
| 302 |
+
generated by the bytecode compiler.
|
| 303 |
+
* It has a single adaptive instruction that records an execution count and,
|
| 304 |
+
at regular intervals, attempts to specialize itself. If not specializing,
|
| 305 |
+
it executes the base implementation.
|
| 306 |
+
* It has at least one specialized form of the instruction that is tailored
|
| 307 |
+
for a particular value or set of values at runtime.
|
| 308 |
+
* All members of the family must have the same number of inline cache entries,
|
| 309 |
+
to ensure correct execution.
|
| 310 |
+
Individual family members do not need to use all of the entries,
|
| 311 |
+
but must skip over any unused entries when executing.
|
| 312 |
+
The current implementation also requires the following,
|
| 313 |
+
although these are not fundamental and may change:
|
| 314 |
+
* All families use one or more inline cache entries,
|
| 315 |
+
the first entry is always the counter.
|
| 316 |
+
* All instruction names should start with the name of the adaptive
|
| 317 |
+
instruction.
|
| 318 |
+
* Specialized forms should have names describing their specialization.
|
| 319 |
+
## Example family
|
| 320 |
+
The `LOAD_GLOBAL` instruction (in [Python/bytecodes.c](../Python/bytecodes.c))
|
| 321 |
+
already has an adaptive family that serves as a relatively simple example.
|
| 322 |
+
The `LOAD_GLOBAL` instruction performs adaptive specialization,
|
| 323 |
+
calling `_Py_Specialize_LoadGlobal()` when the counter reaches zero.
|
| 324 |
+
There are two specialized instructions in the family, `LOAD_GLOBAL_MODULE`
|
| 325 |
+
which is specialized for global variables in the module, and
|
| 326 |
+
`LOAD_GLOBAL_BUILTIN` which is specialized for builtin variables.
|
| 327 |
+
## Performance analysis
|
| 328 |
+
The benefit of a specialization can be assessed with the following formula:
|
| 329 |
+
`Tbase/Tadaptive`.
|
| 330 |
+
Where `Tbase` is the mean time to execute the base instruction,
|
| 331 |
+
and `Tadaptive` is the mean time to execute the specialized and adaptive forms.
|
| 332 |
+
`Tadaptive = (sum(Ti*Ni) + Tmiss*Nmiss)/(sum(Ni)+Nmiss)`
|
| 333 |
+
`Ti` is the time to execute the `i`th instruction in the family and `Ni` is
|
| 334 |
+
the number of times that instruction is executed.
|
| 335 |
+
`Tmiss` is the time to process a miss, including de-optimization
|
| 336 |
+
and the time to execute the base instruction.
|
| 337 |
+
The ideal situation is where misses are rare and the specialized
|
| 338 |
+
forms are much faster than the base instruction.
|
| 339 |
+
`LOAD_GLOBAL` is near ideal, `Nmiss/sum(Ni) ≈ 0`.
|
| 340 |
+
In which case we have `Tadaptive ≈ sum(Ti*Ni)`.
|
| 341 |
+
Since we can expect the specialized forms `LOAD_GLOBAL_MODULE` and
|
| 342 |
+
`LOAD_GLOBAL_BUILTIN` to be much faster than the adaptive base instruction,
|
| 343 |
+
we would expect the specialization of `LOAD_GLOBAL` to be profitable.
|
| 344 |
+
## Design considerations
|
| 345 |
+
While `LOAD_GLOBAL` may be ideal, instructions like `LOAD_ATTR` and
|
| 346 |
+
`CALL_FUNCTION` are not. For maximum performance we want to keep `Ti`
|
| 347 |
+
low for all specialized instructions and `Nmiss` as low as possible.
|
| 348 |
+
Keeping `Nmiss` low means that there should be specializations for almost
|
| 349 |
+
all values seen by the base instruction. Keeping `sum(Ti*Ni)` low means
|
| 350 |
+
keeping `Ti` low which means minimizing branches and dependent memory
|
| 351 |
+
accesses (pointer chasing). These two objectives may be in conflict,
|
| 352 |
+
requiring judgement and experimentation to design the family of instructions.
|
| 353 |
+
The size of the inline cache should be as small as possible,
|
| 354 |
+
without impairing performance, to reduce the number of
|
| 355 |
+
`EXTENDED_ARG` jumps, and to reduce pressure on the CPU's data cache.
|
| 356 |
+
### Gathering data
|
| 357 |
+
Before choosing how to specialize an instruction, it is important to gather
|
| 358 |
+
some data. What are the patterns of usage of the base instruction?
|
| 359 |
+
Data can best be gathered by instrumenting the interpreter. Since a
|
| 360 |
+
specialization function and adaptive instruction are going to be required,
|
| 361 |
+
instrumentation can most easily be added in the specialization function.
|
| 362 |
+
### Choice of specializations
|
| 363 |
+
The performance of the specializing adaptive interpreter relies on the
|
| 364 |
+
quality of specialization and keeping the overhead of specialization low.
|
| 365 |
+
Specialized instructions must be fast. In order to be fast,
|
| 366 |
+
specialized instructions should be tailored for a particular
|
| 367 |
+
set of values that allows them to:
|
| 368 |
+
1. Verify that incoming value is part of that set with low overhead.
|
| 369 |
+
2. Perform the operation quickly.
|
| 370 |
+
This requires that the set of values is chosen such that membership can be
|
| 371 |
+
tested quickly and that membership is sufficient to allow the operation to be
|
| 372 |
+
performed quickly.
|
| 373 |
+
For example, `LOAD_GLOBAL_MODULE` is specialized for `globals()`
|
| 374 |
+
dictionaries that have keys with the expected version.
|
| 375 |
+
This can be tested quickly:
|
| 376 |
+
* `globals->keys->dk_version == expected_version`
|
| 377 |
+
and the operation can be performed quickly:
|
| 378 |
+
* `value = entries[cache->index].me_value;`.
|
| 379 |
+
Because it is impossible to measure the performance of an instruction without
|
| 380 |
+
also measuring unrelated factors, the assessment of the quality of a
|
| 381 |
+
specialization will require some judgement.
|
| 382 |
+
As a general rule, specialized instructions should be much faster than the
|
| 383 |
+
base instruction.
|
| 384 |
+
### Implementation of specialized instructions
|
| 385 |
+
In general, specialized instructions should be implemented in two parts:
|
| 386 |
+
1. A sequence of guards, each of the form
|
| 387 |
+
`DEOPT_IF(guard-condition-is-false, BASE_NAME)`.
|
| 388 |
+
2. The operation, which should ideally have no branches and
|
| 389 |
+
a minimum number of dependent memory accesses.
|
| 390 |
+
In practice, the parts may overlap, as data required for guards
|
| 391 |
+
can be re-used in the operation.
|
| 392 |
+
If there are branches in the operation, then consider further specialization
|
| 393 |
+
to eliminate the branches.
|
| 394 |
+
### Maintaining stats
|
| 395 |
+
Finally, take care that stats are gathered correctly.
|
| 396 |
+
After the last `DEOPT_IF` has passed, a hit should be recorded with
|
| 397 |
+
`STAT_INC(BASE_INSTRUCTION, hit)`.
|
| 398 |
+
After an optimization has been deferred in the adaptive instruction,
|
| 399 |
+
that should be recorded with `STAT_INC(BASE_INSTRUCTION, deferred)`.
|
| 400 |
+
Additional resources
|
| 401 |
+
--------------------
|
| 402 |
+
* Brandt Bucher's talk about the specializing interpreter at PyCon US 2023.
|
| 403 |
+
[Slides](https://github.com/brandtbucher/brandtbucher/blob/master/2023/04/21/inside_cpython_311s_new_specializing_adaptive_interpreter.pdf)
|
| 404 |
+
[Video](https://www.youtube.com/watch?v=PGZPSWZSkJI&t=1470s)
|
| 405 |
+
|
| 406 |
+
#!/usr/bin/env python3
|
| 407 |
+
import asyncio
|
| 408 |
+
import argparse
|
| 409 |
+
import json
|
| 410 |
+
import os
|
| 411 |
+
import platform
|
| 412 |
+
import re
|
| 413 |
+
import shlex
|
| 414 |
+
import shutil
|
| 415 |
+
import signal
|
| 416 |
+
import subprocess
|
| 417 |
+
import sys
|
| 418 |
+
import sysconfig
|
| 419 |
+
from asyncio import wait_for
|
| 420 |
+
from contextlib import asynccontextmanager
|
| 421 |
+
from datetime import datetime, timezone
|
| 422 |
+
from glob import glob
|
| 423 |
+
from os.path import abspath, basename, relpath
|
| 424 |
+
from pathlib import Path
|
| 425 |
+
from subprocess import CalledProcessError
|
| 426 |
+
from tempfile import TemporaryDirectory
|
| 427 |
+
# Name of this script, used in user-facing error messages.
SCRIPT_NAME = Path(__file__).name
# The Android/ directory containing this script, and the CPython root above it.
ANDROID_DIR = Path(__file__).resolve().parent
PYTHON_DIR = ANDROID_DIR.parent

# Whether this script is running from inside a CPython source checkout.
in_source_tree = (
    ANDROID_DIR.name == "Android" and (PYTHON_DIR / "pyconfig.h.in").exists()
)

# Shell script that sets up the Android cross-compilation environment.
ENV_SCRIPT = ANDROID_DIR / "android-env.sh"
# Gradle project used to run the test suite on a device or emulator.
TESTBED_DIR = ANDROID_DIR / "testbed"
# All cross-build artifacts are kept under this directory, one subdir per host.
CROSS_BUILD_DIR = PYTHON_DIR / "cross-build"

# Android target triples supported by this script.
HOSTS = ["aarch64-linux-android", "x86_64-linux-android"]
# Application ID of the testbed app.
APP_ID = "org.python.testbed"
# Arguments for bytes.decode when reading subprocess output.
DECODE_ARGS = ("UTF-8", "backslashreplace")

try:
    android_home = Path(os.environ['ANDROID_HOME'])
except KeyError:
    sys.exit("The ANDROID_HOME environment variable is required.")

# SDK tool paths; Windows builds of these tools have different extensions.
adb = Path(
    f"{android_home}/platform-tools/adb"
    + (".exe" if os.name == "nt" else "")
)

gradlew = Path(
    f"{TESTBED_DIR}/gradlew"
    + (".bat" if os.name == "nt" else "")
)

# Whether we've seen any output from Python yet.
python_started = False

# Buffer for verbose output which will be displayed only if a test fails and
# there has been no output from Python.
hidden_output = []
|
| 456 |
+
def log_verbose(context, line, stream=sys.stdout):
    """Emit *line* immediately in verbose mode, otherwise buffer it.

    Buffered lines are kept in the module-level ``hidden_output`` list so
    they can be replayed later (e.g. after a test failure with no Python
    output).
    """
    if not context.verbose:
        hidden_output.append((stream, line))
        return
    stream.write(line)
|
| 461 |
+
def delete_glob(pattern):
    """Delete every filesystem entry matching *pattern*, printing each one.

    Directories are removed recursively; symlinks and regular files are
    unlinked without following them.
    """
    # glob() is used here because Path.glob doesn't accept non-relative
    # patterns.
    for match in glob(str(pattern)):
        entry = Path(match)
        print(f"Deleting {entry} ...")
        if entry.is_symlink() or not entry.is_dir():
            entry.unlink()
        else:
            shutil.rmtree(entry)
|
| 470 |
+
def subdir(*parts, create=False):
    """Return a path under CROSS_BUILD_DIR, optionally creating it.

    If the path is missing and *create* is false, exit with a message
    telling the user which `configure` subcommand would create it.
    """
    path = CROSS_BUILD_DIR.joinpath(*parts)
    if path.exists():
        return path
    if not create:
        sys.exit(
            f"{path} does not exist. Create it by running the appropriate "
            f"`configure` subcommand of {SCRIPT_NAME}.")
    path.mkdir(parents=True)
    return path
|
| 480 |
+
def run(command, *, host=None, env=None, log=True, **kwargs):
    """subprocess.run wrapper used throughout this script.

    Defaults to check=True. When *host* is given, the android-env
    variables for that host are merged into the environment (and echoed
    so the user can reproduce the command). When *log* is true, the
    command itself is echoed in a copy-pasteable form.
    """
    kwargs.setdefault("check", True)
    env = os.environ.copy() if env is None else env

    if host:
        host_env = android_env(host)
        print_env(host_env)
        env.update(host_env)

    if log:
        print(">", join_command(command))
    return subprocess.run(command, env=env, **kwargs)
|
| 491 |
+
def join_command(args):
    """Format a command so it can be copied into a shell.

    Like shlex.join, but also accepts arguments which are Paths, or a
    single string/Path outside of a list.
    """
    if isinstance(args, (str, Path)):
        return str(args)
    return shlex.join(str(part) for part in args)
|
| 498 |
+
def print_env(env):
    """Print *env* as shell `export` lines, sorted by variable name."""
    for key in sorted(env):
        print(f"export {key}={shlex.quote(env[key])}")
|
| 502 |
+
def android_env(host):
    """Return the environment variables set by android-env.sh.

    *host* selects the per-host prefix under cross-build/; if falsy, the
    prefix inside ANDROID_DIR is used instead. The host triple is then
    re-derived from the installed _sysconfigdata filename. Only variables
    whose values differ from the current process environment are returned.
    """
    if host:
        prefix = subdir(host) / "prefix"
    else:
        prefix = ANDROID_DIR / "prefix"
    # Recover the host triple from the _sysconfigdata filename, which embeds
    # it (e.g. _sysconfigdata__android_aarch64-linux-android.py).
    sysconfig_files = prefix.glob("lib/python*/_sysconfigdata__android_*.py")
    sysconfig_filename = next(sysconfig_files).name
    host = re.fullmatch(r"_sysconfigdata__android_(.+).py", sysconfig_filename)[1]

    # Source android-env.sh in a subshell and capture the resulting exports.
    env_output = subprocess.run(
        f"set -eu; "
        f"HOST={host}; "
        f"PREFIX={prefix}; "
        f". {ENV_SCRIPT}; "
        f"export",
        check=True, shell=True, capture_output=True, encoding='utf-8',
    ).stdout

    env = {}
    for line in env_output.splitlines():
        # We don't require every line to match, as there may be some other
        # output from installing the NDK.
        if match := re.search(
            "^(declare -x |export )?(\\w+)=['\"]?(.*?)['\"]?$", line
        ):
            key, value = match[2], match[3]
            # Keep only variables the script changed or added.
            if os.environ.get(key) != value:
                env[key] = value

    if not env:
        raise ValueError(f"Found no variables in {ENV_SCRIPT.name} output:\n"
                         + env_output)
    return env
|
| 532 |
+
def build_python_path():
    """The path to the build Python binary."""
    build_dir = subdir("build")
    binary = build_dir / "python"
    # On Windows the binary carries a .exe suffix; try both.
    for candidate in (binary, binary.with_suffix(".exe")):
        if candidate.is_file():
            return candidate
    raise FileNotFoundError("Unable to find `python(.exe)` in "
                            f"{build_dir}")
|
| 542 |
+
def configure_build_python(context):
    """Run `configure` for the build (host-native) Python."""
    if context.clean:
        clean("build")
    os.chdir(subdir("build", create=True))

    command = [relpath(PYTHON_DIR / "configure"), *(context.args or [])]
    run(command)
|
| 550 |
+
def make_build_python(context):
    """Compile the build (host-native) Python with a parallel make."""
    jobs = os.cpu_count()
    os.chdir(subdir("build"))
    run(["make", "-j", f"{jobs}"])
|
| 553 |
+
# To create new builds of these dependencies, usually all that's necessary is to
|
| 554 |
+
# push a tag to the cpython-android-source-deps repository, and GitHub Actions
|
| 555 |
+
# will do the rest.
|
| 556 |
+
#
|
| 557 |
+
# If you're a member of the Python core team, and you'd like to be able to push
|
| 558 |
+
# these tags yourself, please contact Malcolm Smith or Russell Keith-Magee.
|
| 559 |
+
def unpack_deps(host, prefix_dir):
    """Download and unpack prebuilt dependency libraries into *prefix_dir*.

    Each archive is fetched from the cpython-android-source-deps GitHub
    releases, extracted in place, and the archive file removed.
    """
    os.chdir(prefix_dir)
    deps_url = "https://github.com/beeware/cpython-android-source-deps/releases/download"
    for name_ver in ["bzip2-1.0.8-3", "libffi-3.4.4-3", "openssl-3.0.18-0",
                     "sqlite-3.50.4-0", "xz-5.4.6-1", "zstd-1.5.7-1"]:
        filename = f"{name_ver}-{host}.tar.gz"
        # Bug fix: the URL must end with the archive filename; it previously
        # contained a literal placeholder, so the download and the subsequent
        # unpack_archive(filename) could never refer to the same file.
        download(f"{deps_url}/{name_ver}/{filename}")
        shutil.unpack_archive(filename)
        os.remove(filename)
|
| 568 |
+
def download(url, target_dir="."):
    """Download *url* into *target_dir* with curl; return the output path."""
    destination = f"{target_dir}/{basename(url)}"
    command = ["curl", "-Lf", "--retry", "5", "--retry-all-errors",
               "-o", destination, url]
    run(command)
    return destination
|
| 572 |
+
def configure_host_python(context):
    """Run `configure` for an Android cross-build of CPython.

    Creates the per-host cross-build directory, downloading the prebuilt
    dependency libraries into its prefix on first use.
    """
    if context.clean:
        clean(context.host)

    host_dir = subdir(context.host, create=True)
    prefix_dir = host_dir / "prefix"
    if not prefix_dir.exists():
        prefix_dir.mkdir()
        unpack_deps(context.host, prefix_dir)

    os.chdir(host_dir)
    command = [
        # Basic cross-compiling configuration
        relpath(PYTHON_DIR / "configure"),
        f"--host={context.host}",
        f"--build={sysconfig.get_config_var('BUILD_GNU_TYPE')}",
        f"--with-build-python={build_python_path()}",
        "--without-ensurepip",

        # Android always uses a shared libpython.
        "--enable-shared",
        "--without-static-libpython",

        # Dependent libraries. The others are found using pkg-config: see
        # android-env.sh.
        f"--with-openssl={prefix_dir}",
    ]

    if context.args:
        command.extend(context.args)
    run(command, host=context.host)
|
| 598 |
+
def make_host_python(context):
    """Compile and install the cross-built Python into its prefix dir."""
    # The CFLAGS and LDFLAGS set in android-env include the prefix dir, so
    # delete any previous Python installation to prevent it being used during
    # the build.
    host_dir = subdir(context.host)
    prefix_dir = host_dir / "prefix"
    for pattern in ("include/python*", "lib/libpython*", "lib/python*"):
        delete_glob(f"{prefix_dir}/{pattern}")

    # The Android environment variables were already captured in the Makefile by
    # `configure`, and passing them again when running `make` may cause some
    # flags to be duplicated. So we don't use the `host` argument here.
    os.chdir(host_dir)
    run(["make", "-j", str(os.cpu_count())])
    # The `make install` output is very verbose and rarely useful, so
    # suppress it by default.
    run(
        ["make", "install", f"prefix={prefix_dir}"],
        capture_output=not context.verbose,
    )
|
| 617 |
+
def build_all(context):
    """Run the complete build: build Python first, then the Android host."""
    for step in (configure_build_python, make_build_python,
                 configure_host_python, make_host_python):
        step(context)
|
| 622 |
+
def clean(host):
    """Remove the cross-build directory for *host* (or "build")."""
    delete_glob(CROSS_BUILD_DIR / host)
|
| 624 |
+
def clean_all(context):
    """Remove every cross-build directory, including the build Python's."""
    for host in [*HOSTS, "build"]:
        clean(host)
|
| 627 |
+
def setup_ci():
    """Prepare a GitHub Actions runner: enable KVM and prune unused NDKs.

    No-op outside of GitHub Actions.
    """
    if "GITHUB_ACTIONS" in os.environ:
        # Enable emulator hardware acceleration
        # (https://github.blog/changelog/2024-04-02-github-actions-hardware-accelerated-android-virtualization-now-available/).
        if platform.system() == "Linux":
            run(
                ["sudo", "tee", "/etc/udev/rules.d/99-kvm4all.rules"],
                input='KERNEL=="kvm", GROUP="kvm", MODE="0666", OPTIONS+="static_node=kvm"\n',
                text=True,
            )
            run(["sudo", "udevadm", "control", "--reload-rules"])
            run(["sudo", "udevadm", "trigger", "--name-match=kvm"])

        # Free up disk space by deleting unused versions of the NDK
        # (https://github.com/freakboy3742/pyspamsum/pull/108).
        # The required version is read out of android-env.sh.
        for line in ENV_SCRIPT.read_text().splitlines():
            if match := re.fullmatch(r"ndk_version=(.+)", line):
                ndk_version = match[1]
                break
        else:
            raise ValueError(f"Failed to find NDK version in {ENV_SCRIPT.name}")
        for item in (android_home / "ndk").iterdir():
            # NDK install directories are named by version number.
            if item.name[0].isdigit() and item.name != ndk_version:
                delete_glob(item)
|
| 650 |
+
def setup_sdk():
    """Accept SDK licenses and ensure platform-tools (adb) is installed."""
    sdkmanager = android_home / (
        "cmdline-tools/latest/bin/sdkmanager"
        + (".bat" if os.name == "nt" else "")
    )

    # Gradle will fail if it needs to install an SDK package whose license
    # hasn't been accepted, so pre-accept all licenses.
    if not all((android_home / "licenses" / path).exists() for path in [
        "android-sdk-arm-dbt-license", "android-sdk-license"
    ]):
        run(
            [sdkmanager, "--licenses"],
            text=True,
            capture_output=True,
            # Answer "y" to every license prompt.
            input="y\n" * 100,
        )

    # Gradle may install this automatically, but we can't rely on that because
    # we need to run adb within the logcat task.
    if not adb.exists():
        run([sdkmanager, "platform-tools"])
|
| 670 |
+
# To avoid distributing compiled artifacts without corresponding source code,
|
| 671 |
+
# the Gradle wrapper is not included in the CPython repository. Instead, we
|
| 672 |
+
# extract it from the Gradle GitHub repository.
|
| 673 |
+
def setup_testbed():
    """Fetch the Gradle wrapper scripts and JAR if they're not present.

    To avoid distributing compiled artifacts without corresponding source
    code, the wrapper is not kept in the repository; it is downloaded from
    the Gradle GitHub repository instead.
    """
    paths = ["gradlew", "gradlew.bat", "gradle/wrapper/gradle-wrapper.jar"]
    if all((TESTBED_DIR / path).exists() for path in paths):
        return

    # The wrapper version isn't important, as any version of the wrapper can
    # download any version of Gradle. The Gradle version actually used for the
    # build is specified in testbed/gradle/wrapper/gradle-wrapper.properties.
    version = "8.9.0"
    for path in paths:
        out_path = TESTBED_DIR / path
        out_path.parent.mkdir(exist_ok=True)
        download(
            f"https://raw.githubusercontent.com/gradle/gradle/v{version}/{path}",
            out_path.parent,
        )
        # The wrapper scripts must be executable.
        os.chmod(out_path, 0o755)
|
| 689 |
+
# run_testbed will build the app automatically, but it's useful to have this as
|
| 690 |
+
# a separate command to allow running the app outside of this script.
|
| 691 |
+
def build_testbed(context):
    """Build the testbed app's debug and androidTest APKs with Gradle."""
    setup_sdk()
    setup_testbed()
    tasks = ["packageDebug", "packageDebugAndroidTest"]
    run([gradlew, "--console", "plain", *tasks], cwd=TESTBED_DIR)
|
| 698 |
+
# Work around a bug involving sys.exit and TaskGroups
|
| 699 |
+
# (https://github.com/python/cpython/issues/101515).
|
| 700 |
+
def exit(*args):
    """sys.exit replacement that raises MySystemExit instead.

    Works around a bug involving sys.exit and TaskGroups
    (https://github.com/python/cpython/issues/101515); run_testbed catches
    MySystemExit from the TaskGroup and re-raises SystemExit.
    """
    raise MySystemExit(*args)
|
| 702 |
+
class MySystemExit(Exception):
    """Raised by exit() in place of SystemExit (gh-101515 workaround)."""
    pass
|
| 704 |
+
# The `test` subcommand runs all subprocesses through this context manager so
|
| 705 |
+
# that no matter what happens, they can always be cancelled from another task,
|
| 706 |
+
# and they will always be cleaned up on exit.
|
| 707 |
+
@asynccontextmanager
async def async_process(*args, **kwargs):
    """Context manager wrapping asyncio.create_subprocess_exec.

    Guarantees the subprocess is terminated (SIGTERM, then SIGKILL after a
    grace period) and reaped on exit, no matter how the body finishes.
    """
    process = await asyncio.create_subprocess_exec(*args, **kwargs)
    try:
        yield process
    finally:
        if process.returncode is None:
            # Allow a reasonably long time for Gradle to clean itself up,
            # because we don't want stale emulators left behind.
            timeout = 10
            process.terminate()
            try:
                await wait_for(process.wait(), timeout)
            except TimeoutError:
                print(
                    f"Command {args} did not terminate after {timeout} seconds "
                    f" - sending SIGKILL"
                )
                process.kill()

                # Even after killing the process we must still wait for it,
                # otherwise we'll get the warning "Exception ignored in __del__".
                await wait_for(process.wait(), timeout=1)
|
| 729 |
+
async def async_check_output(*args, **kwargs):
    """Async analogue of subprocess.check_output.

    Returns decoded stdout on success; raises CalledProcessError (carrying
    decoded stdout and stderr) on a non-zero exit status.
    """
    async with async_process(
        *args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs
    ) as process:
        stdout, stderr = await process.communicate()
        if process.returncode != 0:
            raise CalledProcessError(
                process.returncode, args,
                stdout.decode(*DECODE_ARGS), stderr.decode(*DECODE_ARGS)
            )
        return stdout.decode(*DECODE_ARGS)
|
| 741 |
+
# Return a list of the serial numbers of connected devices. Emulators will have
|
| 742 |
+
# serials of the form "emulator-5678".
|
| 743 |
+
async def list_devices():
    """Return the serial numbers of connected devices, via `adb devices`.

    Emulators will have serials of the form "emulator-5678". Only devices
    in the "device" (fully connected) state are returned.
    """
    serials = []
    header_found = False

    lines = (await async_check_output(adb, "devices")).splitlines()
    for line in lines:
        # Ignore blank lines, and all lines before the header.
        line = line.strip()
        if line == "List of devices attached":
            header_found = True
            continue
        if not (header_found and line):
            continue
        try:
            serial, status = line.split()
        except ValueError:
            raise ValueError(f"failed to parse {line!r}")
        if status == "device":
            serials.append(serial)

    if not header_found:
        raise ValueError(f"failed to parse {lines}")
    return serials
|
| 762 |
+
async def find_device(context, initial_devices):
    """Return the serial of the device to monitor.

    In --managed mode, poll until exactly one device appears that wasn't in
    *initial_devices*; in --connected mode, simply return the serial the
    user specified.
    """
    if context.managed:
        print("Waiting for managed device - this may take several minutes")
        while True:
            new_devices = set(await list_devices()).difference(initial_devices)
            if len(new_devices) == 0:
                await asyncio.sleep(1)
            elif len(new_devices) == 1:
                serial = new_devices.pop()
                print(f"Serial: {serial}")
                return serial
            else:
                # Ambiguous: we can't tell which device Gradle created.
                exit(f"Found more than one new device: {new_devices}")
    else:
        return context.connected
|
| 777 |
+
# An older version of this script in #121595 filtered the logs by UID instead.
|
| 778 |
+
# But logcat can't filter by UID until API level 31. If we ever switch back to
|
| 779 |
+
# filtering by UID, we'll also have to filter by time so we only show messages
|
| 780 |
+
# produced after the initial call to `stop_app`.
|
| 781 |
+
#
|
| 782 |
+
# We're more likely to miss the PID because it's shorter-lived, so there's a
|
| 783 |
+
# workaround in PythonSuite.kt to stop it being *too* short-lived.
|
| 784 |
+
async def find_pid(serial):
    """Poll until the testbed app's process appears; return its PID."""
    print("Waiting for app to start - this may take several minutes")
    shown_error = False
    while True:
        try:
            # `pidof` requires API level 24 or higher. The level 23 emulator
            # includes it, but it doesn't work (it returns all processes).
            pid = (await async_check_output(
                adb, "-s", serial, "shell", "pidof", "-s", APP_ID
            )).strip()
        except CalledProcessError as e:
            # If the app isn't running yet, pidof gives no output. So if there
            # is output, there must have been some other error. However, this
            # sometimes happens transiently, especially when running a managed
            # emulator for the first time, so don't make it fatal.
            if (e.stdout or e.stderr) and not shown_error:
                print_called_process_error(e)
                print("This may be transient, so continuing to wait")
                shown_error = True
        else:
            # Some older devices (e.g. Nexus 4) return zero even when no process
            # was found, so check whether we actually got any output.
            if pid:
                print(f"PID: {pid}")
                return pid

        # Loop fairly rapidly to avoid missing a short-lived process.
        await asyncio.sleep(0.2)
|
| 811 |
+
async def logcat_task(context, initial_devices):
    """Stream the testbed app's logcat output to stdout/stderr.

    Waits for the device and the app's PID, then follows `adb logcat --pid`,
    routing high-severity lines to stderr and stripping the tag prefixes
    from Python's own stdout/stderr lines.
    """
    # Gradle may need to do some large downloads of libraries and emulator
    # images. This will happen during find_device in --managed mode, or find_pid
    # in --connected mode.
    startup_timeout = 600
    serial = await wait_for(find_device(context, initial_devices), startup_timeout)
    pid = await wait_for(find_pid(serial), startup_timeout)

    # `--pid` requires API level 24 or higher.
    args = [adb, "-s", serial, "logcat", "--pid", pid, "--format", "tag"]
    logcat_started = False
    async with async_process(
        *args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
    ) as process:
        while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
            if match := re.fullmatch(r"([A-Z])/(.*)", line, re.DOTALL):
                logcat_started = True
                level, message = match.groups()
            else:
                # If the regex doesn't match, this is either a logcat startup
                # error, or the second or subsequent line of a multi-line
                # message. Python won't produce multi-line messages, but other
                # components might.
                level, message = None, line

            # Exclude high-volume messages which are rarely useful.
            if context.verbose < 2 and "from python test_syslog" in message:
                continue

            # Put high-level messages on stderr so they're highlighted in the
            # buildbot logs. This will include Python's own stderr.
            stream = (
                sys.stderr
                if level in ["W", "E", "F"]  # WARNING, ERROR, FATAL (aka ASSERT)
                else sys.stdout
            )

            # To simplify automated processing of the output, e.g. a buildbot
            # posting a failure notice on a GitHub PR, we strip the level and
            # tag indicators from Python's stdout and stderr.
            for prefix in ["python.stdout: ", "python.stderr: "]:
                if message.startswith(prefix):
                    global python_started
                    python_started = True
                    stream.write(message.removeprefix(prefix))
                    break
            else:
                # Non-Python messages add a lot of noise, but they may
                # sometimes help explain a failure.
                log_verbose(context, line, stream)

        # If the device disconnects while logcat is running, which always
        # happens in --managed mode, some versions of adb return non-zero.
        # Distinguish this from a logcat startup error by checking whether we've
        # received any logcat messages yet.
        status = await wait_for(process.wait(), timeout=1)
        if status != 0 and not logcat_started:
            raise CalledProcessError(status, args)
|
| 864 |
+
def stop_app(serial):
    """Force-stop the testbed app on the device with the given serial."""
    command = [adb, "-s", serial, "shell", "am", "force-stop", APP_ID]
    run(command, log=False)
|
| 866 |
+
async def gradle_task(context):
    """Run the test suite on the device via the Gradle androidTest task.

    Builds the Gradle command line (including the Python arguments passed
    through as instrumentation-runner arguments), streams its output, and
    ensures the app is stopped on interruption in --connected mode.
    """
    env = os.environ.copy()
    if context.managed:
        task_prefix = context.managed
    else:
        task_prefix = "connected"
        # Direct Gradle's connected tasks at the user-specified device.
        env["ANDROID_SERIAL"] = context.connected

    if context.ci_mode:
        context.args[0:0] = [
            # See _add_ci_python_opts in libregrtest/main.py.
            "-W", "error", "-bb", "-E",

            # Randomization is disabled because order-dependent failures are
            # much less likely to pass on a rerun in single-process mode.
            "-m", "test",
            f"--{context.ci_mode}-ci", "--single-process", "--no-randomize"
        ]
    # Default to running the test suite if no command/module was given.
    if not any(arg in context.args for arg in ["-c", "-m"]):
        context.args[0:0] = ["-m", "test"]

    args = [
        gradlew, "--console", "plain", f"{task_prefix}DebugAndroidTest",
    ] + [
        # Pass settings through to the Gradle build as project properties.
        f"-P{name}={value}"
        for name, value in [
            ("python.sitePackages", context.site_packages),
            ("python.cwd", context.cwd),
            (
                "android.testInstrumentationRunnerArguments.pythonArgs",
                json.dumps(context.args),
            ),
        ]
        if value
    ]
    if context.verbose >= 2:
        args.append("--info")
    log_verbose(context, f"> {join_command(args)}\n")

    try:
        async with async_process(
            *args, cwd=TESTBED_DIR, env=env,
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
        ) as process:
            while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
                # Gradle may take several minutes to install SDK packages, so
                # it's worth showing those messages even in non-verbose mode.
                if line.startswith('Preparing "Install'):
                    sys.stdout.write(line)
                else:
                    log_verbose(context, line)

            status = await wait_for(process.wait(), timeout=1)
            if status == 0:
                exit(0)
            else:
                raise CalledProcessError(status, args)
    finally:
        # Gradle does not stop the tests when interrupted.
        if context.connected:
            stop_app(context.connected)
|
| 922 |
+
async def run_testbed(context):
    """Entry point for the `test` subcommand.

    Runs the Gradle test task and the logcat monitor concurrently in a
    TaskGroup, replaying buffered verbose output if the run fails before
    Python produced any output.
    """
    setup_ci()
    setup_sdk()
    setup_testbed()

    if context.managed:
        # In this mode, Gradle will create a device with an unpredictable name.
        # So we save a list of the running devices before starting Gradle, and
        # find_device then waits for a new device to appear.
        initial_devices = await list_devices()
    else:
        # In case the previous shutdown was unclean, make sure the app isn't
        # running, otherwise we might show logs from a previous run. This is
        # unnecessary in --managed mode, because Gradle creates a new emulator
        # every time.
        stop_app(context.connected)
        initial_devices = None

    try:
        async with asyncio.TaskGroup() as tg:
            tg.create_task(logcat_task(context, initial_devices))
            tg.create_task(gradle_task(context))
    except* MySystemExit as e:
        # Translate the gh-101515 workaround back into a real SystemExit.
        raise SystemExit(*e.exceptions[0].args) from None
    except* CalledProcessError as e:
        # If Python produced no output, then the user probably wants to see the
        # verbose output to explain why the test failed.
        if not python_started:
            for stream, line in hidden_output:
                stream.write(line)

        # Extract it from the ExceptionGroup so it can be handled by `main`.
        raise e.exceptions[0]
|
| 952 |
+
def package_version(prefix_dir):
    """Return the version number for a release package built from *prefix_dir*.

    The version is read from the PY_VERSION define in the installed
    patchlevel.h header. If the version ends with "+" (i.e. the build is not
    from a tagged commit), a UTC timestamp is appended, following the PyPA
    version number rules so the result is easy to process with other tools.

    Exits the process with an error message if the header cannot be uniquely
    located or does not contain a version.
    """
    patchlevel_glob = f"{prefix_dir}/include/python*/patchlevel.h"
    patchlevel_paths = glob(patchlevel_glob)
    if len(patchlevel_paths) != 1:
        sys.exit(f"{patchlevel_glob} matched {len(patchlevel_paths)} paths.")

    # Use a context manager so the header file is closed promptly rather than
    # being left to the garbage collector.
    with open(patchlevel_paths[0], encoding="utf-8") as patchlevel_file:
        for line in patchlevel_file:
            if match := re.fullmatch(
                r'\s*#define\s+PY_VERSION\s+"(.+)"\s*', line
            ):
                version = match[1]
                break
        else:
            sys.exit(f"Failed to find Python version in {patchlevel_paths[0]}.")

    # If not building against a tagged commit, add a timestamp to the version.
    # Follow the PyPA version number rules, as this will make it easier to
    # process with other tools.
    if version.endswith("+"):
        version += datetime.now(timezone.utc).strftime("%Y%m%d.%H%M%S")

    return version
|
| 969 |
+
def package(context):
    """Build a release tarball for one host architecture.

    The tarball contains the Android support files tracked in git, plus the
    headers, libraries and pkg-config files needed to embed Python in an app
    or build third-party packages against it. Returns the path of the
    written archive.
    """
    # `subdir` and `android_env` are project helpers defined elsewhere in
    # this file.
    prefix_dir = subdir(context.host, "prefix")
    version = package_version(prefix_dir)

    with TemporaryDirectory(prefix=SCRIPT_NAME) as temp_dir:
        temp_dir = Path(temp_dir)

        # Include all tracked files from the Android directory.
        for line in run(
            ["git", "ls-files"],
            cwd=ANDROID_DIR, capture_output=True, text=True, log=False,
        ).stdout.splitlines():
            src = ANDROID_DIR / line
            dst = temp_dir / line
            dst.parent.mkdir(parents=True, exist_ok=True)
            # follow_symlinks=False preserves symlinks as symlinks.
            shutil.copy2(src, dst, follow_symlinks=False)

        # Include anything from the prefix directory which could be useful
        # either for embedding Python in an app, or building third-party
        # packages against it.
        for rel_dir, patterns in [
            ("include", ["openssl*", "python*", "sqlite*"]),
            ("lib", ["engines-3", "libcrypto*.so", "libpython*", "libsqlite*",
                     "libssl*.so", "ossl-modules", "python*"]),
            ("lib/pkgconfig", ["*crypto*", "*ssl*", "*python*", "*sqlite*"]),
        ]:
            for pattern in patterns:
                for src in glob(f"{prefix_dir}/{rel_dir}/{pattern}"):
                    dst = temp_dir / relpath(src, prefix_dir.parent)
                    dst.parent.mkdir(parents=True, exist_ok=True)
                    if Path(src).is_dir():
                        # Bytecode caches are machine-specific; exclude them.
                        shutil.copytree(
                            src, dst, symlinks=True,
                            ignore=lambda *args: ["__pycache__"]
                        )
                    else:
                        shutil.copy2(src, dst, follow_symlinks=False)

        # Strip debug information.
        if not context.debug:
            so_files = glob(f"{temp_dir}/**/*.so", recursive=True)
            run([android_env(context.host)["STRIP"], *so_files], log=False)

        dist_dir = subdir(context.host, "dist", create=True)
        package_path = shutil.make_archive(
            f"{dist_dir}/python-{version}-{context.host}", "gztar", temp_dir
        )
        print(f"Wrote {package_path}")
        return package_path
|
| 1013 |
+
def ci(context):
    """Run the full CI pipeline: build, package, and (where supported) test.

    Each stage is wrapped in GitHub Actions log groups. After packaging,
    the resulting tarball is unpacked into a scratch directory and its own
    copy of android.py is used to run the test suite, proving the package
    is self-contained.
    """
    build_steps = (
        configure_build_python,
        make_build_python,
        configure_host_python,
        make_host_python,
        package,
    )
    package_path = None
    for build_step in build_steps:
        # Derive a human-readable group caption from the function name.
        caption = (
            build_step.__name__.replace("_", " ")
            .capitalize()
            .replace("python", "Python")
        )
        print(f"::group::{caption}")
        step_result = build_step(context)
        if build_step is package:
            package_path = step_result
        print("::endgroup::")

    on_github = "GITHUB_ACTIONS" in os.environ
    emulator_supported = (
        (platform.system(), platform.machine()) == ("Linux", "x86_64")
    )
    if on_github and not emulator_supported:
        print(
            "Skipping tests: GitHub Actions does not support the Android "
            "emulator on this platform."
        )
        return

    with TemporaryDirectory(prefix=SCRIPT_NAME) as temp_dir:
        print("::group::Tests")
        # Prove the package is self-contained by using it to run the tests.
        shutil.unpack_archive(package_path, temp_dir)
        launcher_args = [
            "--managed", "maxVersion", "-v", f"--{context.ci_mode}-ci"
        ]
        run(
            ["./android.py", "test", *launcher_args],
            cwd=temp_dir
        )
        print("::endgroup::")
|
| 1052 |
+
def env(context):
    """Print the Android build environment variables."""
    # `host` is only present on this subcommand in some configurations, so
    # fall back to None when it's absent.
    host = getattr(context, "host", None)
    print_env(android_env(host))
|
| 1054 |
+
def install_signal_handler():
    """Handle SIGTERM the same way as SIGINT.

    This ensures that if we're terminated by the buildbot worker, we'll make
    an attempt to clean up our subprocesses.
    """
    def _forward_to_sigint(*_args):
        os.kill(os.getpid(), signal.SIGINT)

    signal.signal(signal.SIGTERM, _forward_to_sigint)
|
| 1060 |
+
def parse_args():
    """Parse the command line and return the populated argparse namespace."""
    parser = argparse.ArgumentParser()
    subcommands = parser.add_subparsers(dest="subcommand", required=True)

    # Every subcommand gets a -v/--verbose flag via this wrapper.
    def add_parser(*args, **kwargs):
        parser = subcommands.add_parser(*args, **kwargs)
        parser.add_argument(
            "-v", "--verbose", action="count", default=0,
            help="Show verbose output. Use twice to be even more verbose.")
        return parser

    # Subcommands
    build = add_parser(
        "build", help="Run configure-build, make-build, configure-host and "
        "make-host")
    configure_build = add_parser(
        "configure-build", help="Run `configure` for the build Python")
    add_parser(
        "make-build", help="Run `make` for the build Python")
    configure_host = add_parser(
        "configure-host", help="Run `configure` for Android")
    make_host = add_parser(
        "make-host", help="Run `make` for Android")
    add_parser("clean", help="Delete all build directories")
    add_parser("build-testbed", help="Build the testbed app")
    test = add_parser("test", help="Run the testbed app")
    package = add_parser("package", help="Make a release package")
    ci = add_parser("ci", help="Run build, package and test")
    env = add_parser("env", help="Print environment variables")

    # Common arguments
    for subcommand in [build, configure_build, configure_host, ci]:
        subcommand.add_argument(
            "--clean", action="store_true", default=False, dest="clean",
            help="Delete the relevant build directories first")

    # `env` only takes a host argument when run from the source tree.
    host_commands = [build, configure_host, make_host, package, ci]
    if in_source_tree:
        host_commands.append(env)
    for subcommand in host_commands:
        subcommand.add_argument(
            "host", metavar="HOST", choices=HOSTS,
            help="Host triplet: choices=[%(choices)s]")

    for subcommand in [build, configure_build, configure_host, ci]:
        subcommand.add_argument("args", nargs="*",
                                help="Extra arguments to pass to `configure`")

    # Test arguments
    device_group = test.add_mutually_exclusive_group(required=True)
    device_group.add_argument(
        "--connected", metavar="SERIAL", help="Run on a connected device. "
        "Connect it yourself, then get its serial from `adb devices`.")
    device_group.add_argument(
        "--managed", metavar="NAME", help="Run on a Gradle-managed device. "
        "These are defined in `managedDevices` in testbed/app/build.gradle.kts.")
    test.add_argument(
        "--site-packages", metavar="DIR", type=abspath,
        help="Directory to copy as the app's site-packages.")
    test.add_argument(
        "--cwd", metavar="DIR", type=abspath,
        help="Directory to copy as the app's working directory.")
    test.add_argument(
        "args", nargs="*", help=f"Python command-line arguments. "
        f"Separate them from {SCRIPT_NAME}'s own arguments with `--`. "
        f"If neither -c nor -m are included, `-m test` will be prepended, "
        f"which will run Python's own test suite.")

    # Package arguments.
    for subcommand in [package, ci]:
        subcommand.add_argument(
            "-g", action="store_true", default=False, dest="debug",
            help="Include debug information in package")

    # CI arguments
    # The CI mode is optional for `test` but required for `ci`.
    for subcommand in [test, ci]:
        group = subcommand.add_mutually_exclusive_group(required=subcommand is ci)
        group.add_argument(
            "--fast-ci", action="store_const", dest="ci_mode", const="fast",
            help="Add test arguments for GitHub Actions")
        group.add_argument(
            "--slow-ci", action="store_const", dest="ci_mode", const="slow",
            help="Add test arguments for buildbots")

    return parser.parse_args()
|
| 1136 |
+
def main():
    """Command-line entry point: parse arguments and run the subcommand."""
    install_signal_handler()

    # Under the buildbot, stdout is not a TTY, but we must still flush after
    # every line to make sure our output appears in the correct order relative
    # to the output of our subprocesses.
    sys.stdout.reconfigure(line_buffering=True)
    sys.stderr.reconfigure(line_buffering=True)

    context = parse_args()
    handlers = {
        "configure-build": configure_build_python,
        "make-build": make_build_python,
        "configure-host": configure_host_python,
        "make-host": make_host_python,
        "build": build_all,
        "clean": clean_all,
        "build-testbed": build_testbed,
        "test": run_testbed,
        "package": package,
        "ci": ci,
        "env": env,
    }
    try:
        outcome = handlers[context.subcommand](context)
        # Async subcommands return a coroutine for us to drive to completion.
        if asyncio.iscoroutine(outcome):
            asyncio.run(outcome)
    except CalledProcessError as error:
        print_called_process_error(error)
        sys.exit(1)
|
| 1164 |
+
def print_called_process_error(e):
    """Report a CalledProcessError in a human-readable way.

    Any captured stdout/stderr is replayed on our own corresponding streams
    (decoded if necessary, and newline-terminated), followed by a one-line
    summary of the failed command.
    """
    for name in ("stdout", "stderr"):
        text = getattr(e, name)
        if isinstance(text, bytes):
            text = text.decode(*DECODE_ARGS)
        if not text:
            continue
        target = getattr(sys, name)
        target.write(text)
        if not text.endswith("\n"):
            target.write("\n")

    # shlex uses single quotes, so we surround the command with double quotes.
    print(
        f'Command "{join_command(e.cmd)}" returned exit status {e.returncode}'
    )
|
| 1178 |
+
if __name__ == "__main__":
    # Allow this file to be imported as a module or run as a script.
    main()
|
| 1180 |
+
|
| 1181 |
+
# Python for Android
|
| 1182 |
+
If you obtained this README as part of a release package, then the only
|
| 1183 |
+
applicable sections are "Prerequisites", "Testing", and "Using in your own app".
|
| 1184 |
+
If you obtained this README as part of the CPython source tree, then you can
|
| 1185 |
+
also follow the other sections to compile Python for Android yourself.
|
| 1186 |
+
However, most app developers should not need to do any of these things manually.
|
| 1187 |
+
Instead, use one of the tools listed
|
| 1188 |
+
[here](https://docs.python.org/3/using/android.html), which will provide a much
|
| 1189 |
+
easier experience.
|
| 1190 |
+
## Prerequisites
|
| 1191 |
+
If you already have an Android SDK installed, export the `ANDROID_HOME`
|
| 1192 |
+
environment variable to point at its location. Otherwise, here's how to install
|
| 1193 |
+
it:
|
| 1194 |
+
* Download the "Command line tools" from <https://developer.android.com/studio>.
|
| 1195 |
+
* Create a directory `android-sdk/cmdline-tools`, and unzip the command line
|
| 1196 |
+
tools package into it.
|
| 1197 |
+
* Rename `android-sdk/cmdline-tools/cmdline-tools` to
|
| 1198 |
+
`android-sdk/cmdline-tools/latest`.
|
| 1199 |
+
* `export ANDROID_HOME=/path/to/android-sdk`
|
| 1200 |
+
The `android.py` script will automatically use the SDK's `sdkmanager` to install
|
| 1201 |
+
any packages it needs.
|
| 1202 |
+
The script also requires the following commands to be on the `PATH`:
|
| 1203 |
+
* `curl`
|
| 1204 |
+
* `java` (or set the `JAVA_HOME` environment variable)
|
| 1205 |
+
## Building
|
| 1206 |
+
Python can be built for Android on any POSIX platform supported by the Android
|
| 1207 |
+
development tools, which currently means Linux or macOS.
|
| 1208 |
+
First we'll make a "build" Python (for your development machine), then use it to
|
| 1209 |
+
help produce a "host" Python for Android. So make sure you have all the usual
|
| 1210 |
+
tools and libraries needed to build Python for your development machine.
|
| 1211 |
+
The easiest way to do a build is to use the `android.py` script. You can either
|
| 1212 |
+
have it perform the entire build process from start to finish in one step, or
|
| 1213 |
+
you can do it in discrete steps that mirror running `configure` and `make` for
|
| 1214 |
+
each of the two builds of Python you end up producing.
|
| 1215 |
+
The discrete steps for building via `android.py` are:
|
| 1216 |
+
```sh
|
| 1217 |
+
./android.py configure-build
|
| 1218 |
+
./android.py make-build
|
| 1219 |
+
./android.py configure-host HOST
|
| 1220 |
+
./android.py make-host HOST
|
| 1221 |
+
```
|
| 1222 |
+
`HOST` identifies which architecture to build. To see the possible values, run
|
| 1223 |
+
`./android.py configure-host --help`.
|
| 1224 |
+
To do all steps in a single command, run:
|
| 1225 |
+
```sh
|
| 1226 |
+
./android.py build HOST
|
| 1227 |
+
```
|
| 1228 |
+
In the end you should have a build Python in `cross-build/build`, and a host
|
| 1229 |
+
Python in `cross-build/HOST`.
|
| 1230 |
+
You can use `--` as a separator for any of the `configure`-related commands –
|
| 1231 |
+
including `build` itself – to pass arguments to the underlying `configure`
|
| 1232 |
+
call. For example, if you want a pydebug build that also caches the results from
|
| 1233 |
+
`configure`, you can do:
|
| 1234 |
+
```sh
|
| 1235 |
+
./android.py build HOST -- -C --with-pydebug
|
| 1236 |
+
```
|
| 1237 |
+
## Packaging
|
| 1238 |
+
After building an architecture as described in the section above, you can
|
| 1239 |
+
package it for release with this command:
|
| 1240 |
+
```sh
|
| 1241 |
+
./android.py package HOST
|
| 1242 |
+
```
|
| 1243 |
+
`HOST` is defined in the section above.
|
| 1244 |
+
This will generate a tarball in `cross-build/HOST/dist`, whose structure is
|
| 1245 |
+
similar to the `Android` directory of the CPython source tree.
|
| 1246 |
+
## Testing
|
| 1247 |
+
The Python test suite can be run on Linux, macOS, or Windows.
|
| 1248 |
+
On Linux, the emulator needs access to the KVM virtualization interface. This may
|
| 1249 |
+
require adding your user to a group, or changing your udev rules. On GitHub
|
| 1250 |
+
Actions, the test script will do this automatically using the commands shown
|
| 1251 |
+
[here](https://github.blog/changelog/2024-04-02-github-actions-hardware-accelerated-android-virtualization-now-available/).
|
| 1252 |
+
The test suite can usually be run on a device with 2 GB of RAM, but this is
|
| 1253 |
+
borderline, so you may need to increase it to 4 GB. As of Android
|
| 1254 |
+
Studio Koala, 2 GB is the default for all emulators, although the user interface
|
| 1255 |
+
may indicate otherwise. Locate the emulator's directory under `~/.android/avd`,
|
| 1256 |
+
and find `hw.ramSize` in both config.ini and hardware-qemu.ini. Either set these
|
| 1257 |
+
manually to the same value, or use the Android Studio Device Manager, which will
|
| 1258 |
+
update both files.
|
| 1259 |
+
You can run the test suite either:
|
| 1260 |
+
* Within the CPython repository, after doing a build as described above. On
|
| 1261 |
+
Windows, you won't be able to do the build on the same machine, so you'll have
|
| 1262 |
+
to copy the `cross-build/HOST/prefix` directory from somewhere else.
|
| 1263 |
+
* Or by taking a release package built using the `package` command, extracting
|
| 1264 |
+
it wherever you want, and using its own copy of `android.py`.
|
| 1265 |
+
The test script supports the following modes:
|
| 1266 |
+
* In `--connected` mode, it runs on a device or emulator you have already
|
| 1267 |
+
connected to the build machine. List the available devices with
|
| 1268 |
+
`$ANDROID_HOME/platform-tools/adb devices -l`, then pass a device ID to the
|
| 1269 |
+
script like this:
|
| 1270 |
+
```sh
|
| 1271 |
+
./android.py test --connected emulator-5554
|
| 1272 |
+
```
|
| 1273 |
+
* In `--managed` mode, it uses a temporary headless emulator defined in the
|
| 1274 |
+
`managedDevices` section of testbed/app/build.gradle.kts. This mode is slower,
|
| 1275 |
+
but more reproducible.
|
| 1276 |
+
We currently define two devices: `minVersion` and `maxVersion`, corresponding
|
| 1277 |
+
to our minimum and maximum supported Android versions. For example:
|
| 1278 |
+
```sh
|
| 1279 |
+
./android.py test --managed maxVersion
|
| 1280 |
+
```
|
| 1281 |
+
By default, the only messages the script will show are Python's own stdout and
|
| 1282 |
+
stderr. Add the `-v` option to also show Gradle output, and non-Python logcat
|
| 1283 |
+
messages.
|
| 1284 |
+
Any other arguments on the `android.py test` command line will be passed through
|
| 1285 |
+
to `python -m test` – use `--` to separate them from android.py's own options.
|
| 1286 |
+
See the [Python Developer's
|
| 1287 |
+
Guide](https://devguide.python.org/testing/run-write-tests/) for common options
|
| 1288 |
+
– most of them will work on Android, except for those that involve subprocesses,
|
| 1289 |
+
such as `-j`.
|
| 1290 |
+
Every time you run `android.py test`, changes in pure-Python files in the
|
| 1291 |
+
repository's `Lib` directory will be picked up immediately. Changes in C files,
|
| 1292 |
+
and architecture-specific files such as sysconfigdata, will not take effect
|
| 1293 |
+
until you re-run `android.py make-host` or `build`.
|
| 1294 |
+
The testbed app can also be used to test third-party packages. For more details,
|
| 1295 |
+
run `android.py test --help`, paying attention to the options `--site-packages`,
|
| 1296 |
+
`--cwd`, `-c` and `-m`.
|
| 1297 |
+
## Using in your own app
|
| 1298 |
+
See https://docs.python.org/3/using/android.html.
|
| 1299 |
+
|
| 1300 |
+
#include <android/log.h>
|
| 1301 |
+
#include <errno.h>
|
| 1302 |
+
#include <jni.h>
|
| 1303 |
+
#include <pthread.h>
|
| 1304 |
+
#include <Python.h>
|
| 1305 |
+
#include <signal.h>
|
| 1306 |
+
#include <stdio.h>
|
| 1307 |
+
#include <string.h>
|
| 1308 |
+
#include <unistd.h>
|
| 1309 |
+
// Raise a java.lang.RuntimeException with the given message in the calling
// Java code. The exception is only thrown once control returns to the JVM,
// so the caller should return promptly after calling this.
static void throw_runtime_exception(JNIEnv *env, const char *message) {
    (*env)->ThrowNew(
        env,
        (*env)->FindClass(env, "java/lang/RuntimeException"),
        message);
}
|
| 1315 |
+
// Raise a RuntimeException whose message is "<error_prefix>: <strerror(errno)>".
static void throw_errno(JNIEnv *env, const char *error_prefix) {
    char error_message[1024];
    snprintf(error_message, sizeof(error_message),
             "%s: %s", error_prefix, strerror(errno));
    throw_runtime_exception(env, error_message);
}
|
| 1321 |
+
// --- Stdio redirection ------------------------------------------------------

// Most apps won't need this, because the Python-level sys.stdout and sys.stderr
// are redirected to the Android logcat by Python itself. However, in the
// testbed it's useful to redirect the native streams as well, to debug problems
// in the Python startup or redirection process.
//
// Based on
// https://github.com/beeware/briefcase-android-gradle-template/blob/v0.3.11/%7B%7B%20cookiecutter.safe_formal_name%20%7D%7D/app/src/main/cpp/native-lib.cpp

// Per-stream redirection state.
typedef struct {
    FILE *file;                    // the C stream being redirected
    int fd;                        // its underlying file descriptor
    android_LogPriority priority;  // logcat priority for this stream
    char *tag;                     // logcat tag for this stream
    int pipe[2];                   // read/write ends of the redirection pipe
} StreamInfo;

// The FILE member can't be initialized here because stdout and stderr are not
// compile-time constants. Instead, it's initialized immediately before the
// redirection. The final entry, with a NULL file, terminates the array.
static StreamInfo STREAMS[] = {
    {NULL, STDOUT_FILENO, ANDROID_LOG_INFO, "native.stdout", {-1, -1}},
    {NULL, STDERR_FILENO, ANDROID_LOG_WARN, "native.stderr", {-1, -1}},
    {NULL, -1, ANDROID_LOG_UNKNOWN, NULL, {-1, -1}},
};

// The maximum length of a log message in bytes, including the level marker and
// tag, is defined as LOGGER_ENTRY_MAX_PAYLOAD in
// platform/system/logging/liblog/include/log/log.h. As of API level 30, messages
// longer than this will be truncated by logcat. This limit has already been
// reduced at least once in the history of Android (from 4076 to 4068 between API
// level 23 and 26), so leave some headroom.
static const int MAX_BYTES_PER_WRITE = 4000;
|
| 1351 |
+
// Thread body: drain one redirected stream's pipe, forwarding each chunk to
// the Android log. Runs until read() reports EOF or an error on the pipe.
static void *redirection_thread(void *arg) {
    StreamInfo *si = (StreamInfo*)arg;
    ssize_t read_size;
    char buf[MAX_BYTES_PER_WRITE];
    // Read one byte less than the buffer size, leaving room for the
    // null terminator required by __android_log_write.
    while ((read_size = read(si->pipe[0], buf, sizeof buf - 1)) > 0) {
        buf[read_size] = '\0'; /* add null-terminator */
        __android_log_write(si->priority, si->tag, buf);
    }
    return 0;
}
|
| 1361 |
+
// Redirect one native stream to the Android log via a pipe and a reader
// thread. Returns NULL on success; on failure, returns the name of the call
// that failed, with errno set accordingly (suitable for throw_errno).
static char *redirect_stream(StreamInfo *si) {
    /* make the FILE unbuffered, to ensure messages are never lost */
    if (setvbuf(si->file, 0, _IONBF, 0)) {
        return "setvbuf";
    }

    /* create the pipe and redirect the file descriptor */
    if (pipe(si->pipe)) {
        return "pipe";
    }
    if (dup2(si->pipe[1], si->fd) == -1) {
        return "dup2";
    }

    /* start the logging thread */
    pthread_t thr;
    // pthread functions return an error number rather than setting errno, so
    // assign their result to errno for throw_errno's benefit.
    if ((errno = pthread_create(&thr, 0, redirection_thread, si))) {
        return "pthread_create";
    }
    if ((errno = pthread_detach(thr))) {
        return "pthread_detach";
    }
    return 0;
}
|
| 1383 |
+
// JNI entry point: redirect the native stdout and stderr to the Android log.
// Throws a Java RuntimeException if any step of the redirection fails.
JNIEXPORT void JNICALL Java_org_python_testbed_PythonTestRunner_redirectStdioToLogcat(
    JNIEnv *env, jobject obj
) {
    // stdout/stderr aren't compile-time constants, so fill them in here.
    STREAMS[0].file = stdout;
    STREAMS[1].file = stderr;
    for (StreamInfo *si = STREAMS; si->file; si++) {
        char *error_prefix;
        if ((error_prefix = redirect_stream(si))) {
            throw_errno(env, error_prefix);
            return;
        }
    }
}
|
| 1396 |
+
// --- Python initialization ---------------------------------------------------

// Prepare the process signal state for running the Python test suite.
// Returns NULL on success; on failure, returns the name of the call that
// failed, with errno set accordingly.
static char *init_signals() {
    // Some tests use SIGUSR1, but that's blocked by default in an Android app in
    // order to make it available to `sigwait` in the Signal Catcher thread.
    // (https://cs.android.com/android/platform/superproject/+/android14-qpr3-release:art/runtime/signal_catcher.cc).
    // That thread's functionality is only useful for debugging the JVM, so disabling
    // it should not weaken the tests.
    //
    // There's no safe way of stopping the thread completely (#123982), but simply
    // unblocking SIGUSR1 is enough to fix most tests.
    //
    // However, in tests that generate multiple different signals in quick
    // succession, it's possible for SIGUSR1 to arrive while the main thread is busy
    // running the C-level handler for a different signal. In that case, the SIGUSR1
    // may be sent to the Signal Catcher thread instead, which will generate a log
    // message containing the text "reacting to signal".
    //
    // Such tests may need to be changed in one of the following ways:
    // * Use a signal other than SIGUSR1 (e.g. test_stress_delivery_simultaneous in
    //   test_signal.py).
    // * Send the signal to a specific thread rather than the whole process (e.g.
    //   test_signals in test_threadsignals.py).
    sigset_t set;
    if (sigemptyset(&set)) {
        return "sigemptyset";
    }
    if (sigaddset(&set, SIGUSR1)) {
        return "sigaddset";
    }
    // pthread_sigmask returns an error number rather than setting errno.
    if ((errno = pthread_sigmask(SIG_UNBLOCK, &set, NULL))) {
        return "pthread_sigmask";
    }
    return NULL;
}
|
| 1430 |
+
// Raise a RuntimeException from a failed PyStatus. err_msg may be NULL, in
// which case an empty message is used.
static void throw_status(JNIEnv *env, PyStatus status) {
    throw_runtime_exception(env, status.err_msg ? status.err_msg : "");
}
|
| 1433 |
+
// JNI entry point: initialize and run Python.
//
// `home` is the directory containing the Python installation, and `args` is
// the Python command line (argv). Returns Python's exit status, or 1 with a
// pending Java exception if setup fails before Python starts.
JNIEXPORT int JNICALL Java_org_python_testbed_PythonTestRunner_runPython(
    JNIEnv *env, jobject obj, jstring home, jarray args
) {
    const char *home_utf8 = (*env)->GetStringUTFChars(env, home, NULL);

    // Run Python from the "cwd" subdirectory of its home directory.
    char cwd[PATH_MAX];
    snprintf(cwd, sizeof(cwd), "%s/%s", home_utf8, "cwd");
    if (chdir(cwd)) {
        throw_errno(env, "chdir");
        return 1;
    }

    char *error_prefix;
    if ((error_prefix = init_signals())) {
        throw_errno(env, error_prefix);
        return 1;
    }

    PyConfig config;
    PyStatus status;
    PyConfig_InitPythonConfig(&config);

    // Convert the Java string array into a NULL-terminated C argv.
    jsize argc = (*env)->GetArrayLength(env, args);
    const char *argv[argc + 1];
    for (int i = 0; i < argc; i++) {
        jobject arg = (*env)->GetObjectArrayElement(env, args, i);
        argv[i] = (*env)->GetStringUTFChars(env, arg, NULL);
    }
    argv[argc] = NULL;

    // PyConfig_SetBytesArgv "must be called before other methods, since the
    // preinitialization configuration depends on command line arguments"
    if (PyStatus_Exception(status = PyConfig_SetBytesArgv(&config, argc, (char**)argv))) {
        throw_status(env, status);
        return 1;
    }

    status = PyConfig_SetBytesString(&config, &config.home, home_utf8);
    if (PyStatus_Exception(status)) {
        throw_status(env, status);
        return 1;
    }

    status = Py_InitializeFromConfig(&config);
    if (PyStatus_Exception(status)) {
        throw_status(env, status);
        return 1;
    }
    return Py_RunMain();
}
|
| 1476 |
+
|
| 1477 |
+
# Build script for the testbed app's native library (main_activity).
cmake_minimum_required(VERSION 3.4.1)
project(testbed)

# Resolve variables from the command line: the literal token {{triplet}} in
# PYTHON_PREFIX_DIR is replaced with this ABI's library architecture, so a
# single command-line value can serve every ABI being built.
string(
    REPLACE {{triplet}} ${CMAKE_LIBRARY_ARCHITECTURE}
    PYTHON_PREFIX_DIR ${PYTHON_PREFIX_DIR}
)

# Compile against the installed Python headers, and link against libpython
# and the Android logging library.
include_directories(${PYTHON_PREFIX_DIR}/include/python${PYTHON_VERSION})
link_directories(${PYTHON_PREFIX_DIR}/lib)
link_libraries(log python${PYTHON_VERSION})
add_library(main_activity SHARED main_activity.c)
|
| 1488 |
+
|
| 1489 |
+
|
| 1490 |
+
"""
|
cpython_chunk_11.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
cpython_chunk_13.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
cpython_chunk_15.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
cpython_chunk_18.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
cpython_chunk_2.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
cpython_chunk_24.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|