Use Cycles rendering with all GPUs

OptiX is used as the compute backend here; swap in "CUDA" or "OPENCL" if that matches your hardware and Blender version.

# Set the render engine to Cycles
 
bpy.data.scenes[0].render.engine = "CYCLES"
 
# Set the device_type
bpy.context.preferences.addons["cycles"].preferences.compute_device_type = "OPTIX" # or "OPENCL"
 
# Set the device and feature set
bpy.context.scene.cycles.device = "GPU"
bpy.context.scene.cycles.feature_set = "SUPPORTED"
 
# Call get_devices() so Blender detects the available compute devices
bpy.context.preferences.addons["cycles"].preferences.get_devices()
print(bpy.context.preferences.addons["cycles"].preferences.compute_device_type)
for d in bpy.context.preferences.addons["cycles"].preferences.devices:
    if 'Intel' in d['name']:
        continue
    if 'CPU' in d['name']:
        continue
    d["use"] = 1 # Using all devices
    print(d["name"], d["use"])

Enable add-ons with bpy

bpy.ops.preferences.addon_enable(module = 'sun_position')
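
A quick sanity check that the add-on actually registered, plus the matching call to disable it again (shown commented out):

import bpy

# Confirm the add-on shows up among the enabled add-ons
if 'sun_position' in bpy.context.preferences.addons.keys():
    print("sun_position is enabled")

# To turn it back off:
# bpy.ops.preferences.addon_disable(module='sun_position')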

Check if objects are overlapping.

Credit: this utilizes a BVH tree overlap test.

import bpy, bmesh
from mathutils.bvhtree import BVHTree
 
 
#show objects that are intersecting
def intersection_check():
    #check every object for intersection with every other object
    for obj_now in obj_list:
        for obj_next in obj_list:
            # skip comparing an object with itself
            if obj_now == obj_next:
                continue
 
            #create bmesh objects
            bm1 = bmesh.new()
            bm2 = bmesh.new()
            #fill bmesh data from objects
            bm1.from_mesh(scene.objects[obj_now].data)
            bm2.from_mesh(scene.objects[obj_next].data)            
 
            #transform the bmesh data into world space so the overlap test uses actual positions
            bm1.transform(scene.objects[obj_now].matrix_world)
            bm2.transform(scene.objects[obj_next].matrix_world)
 
            #make BVH tree from BMesh of objects
            obj_now_BVHtree = BVHTree.FromBMesh(bm1)
            obj_next_BVHtree = BVHTree.FromBMesh(bm2)           
 
            #get intersecting pairs
            inter = obj_now_BVHtree.overlap(obj_next_BVHtree)

            #free the temporary bmesh data
            bm1.free()
            bm2.free()

            #if the list is empty, the objects are not touching
            if inter:
                print(obj_now + " and " + obj_next + " are touching!")
            else:
                print(obj_now + " and " + obj_next + " are NOT touching!")
 
# use active scene
scene = bpy.context.scene
 
# define object list    
obj_list = ['Ico', 'Cube']
 
# Run it      
intersection_check()
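
The same idea can be reworked to return the intersecting pairs instead of printing them, checking each unordered pair only once via itertools.combinations; this is a sketch, assuming the listed object names exist in the scene:

import bpy, bmesh
from itertools import combinations
from mathutils.bvhtree import BVHTree

def intersecting_pairs(scene, names):
    # Return (name_a, name_b) tuples for every pair of meshes whose BVH trees overlap
    pairs = []
    for name_a, name_b in combinations(names, 2):
        bm_a, bm_b = bmesh.new(), bmesh.new()
        bm_a.from_mesh(scene.objects[name_a].data)
        bm_b.from_mesh(scene.objects[name_b].data)
        bm_a.transform(scene.objects[name_a].matrix_world)
        bm_b.transform(scene.objects[name_b].matrix_world)
        overlap = BVHTree.FromBMesh(bm_a).overlap(BVHTree.FromBMesh(bm_b))
        bm_a.free()
        bm_b.free()
        if overlap:
            pairs.append((name_a, name_b))
    return pairs

print(intersecting_pairs(bpy.context.scene, ['Ico', 'Cube']))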

Get object bounding box in screen space

Also useful as a reference for converting between object space, world space, camera space, and screen space.

import bpy
import mathutils
import bpy_extras.object_utils
 
# Get the current scene and the active camera
scene = bpy.context.scene
camera = scene.camera
 
# Get the selected object
obj = bpy.context.active_object
 
# Ensure there is an active object
if obj is None:
    raise ValueError("No active object selected")
 
# Get the object’s bounding box
bbox = [mathutils.Vector(corner) for corner in obj.bound_box]
 
# Project a bounding-box corner (given in object space) into screen space
def project_to_screen(scene, obj, coord):
    # Object space -> world space, then world space -> normalized camera space
    co_world = obj.matrix_world @ coord
    co_ndc = bpy_extras.object_utils.world_to_camera_view(scene, camera, co_world)
    
    # co_ndc is normalized device coordinates (range 0.0 to 1.0)
    # Multiply by the width and height of the render resolution to get the screen position
    render_scale = scene.render.resolution_percentage / 100
    render_size = (
        scene.render.resolution_x * render_scale,
        scene.render.resolution_y * render_scale
    )
    
    screen_x = co_ndc.x * render_size[0]
    screen_y = (1.0 - co_ndc.y) * render_size[1]  # flip y coordinate
    
    return screen_x, screen_y
 
# Project each bounding-box corner into screen space
screen_positions = [project_to_screen(scene, obj, corner) for corner in bbox]
 
# Print or use the screen positions
for idx, pos in enumerate(screen_positions):
    print(f"Corner {idx}: Screen Position: {pos}")
 
 

Gamma correction

Converts linear color values to sRGB for display on a standard computer monitor (the sRGB transfer function).

import numpy as np
img_srgb = np.where(img <= 0.0031308, 12.92 * img, 1.055 * np.power(img, 1/2.4) - 0.055)
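
A small self-contained usage sketch, assuming img is a float array of linear values (clipped to [0, 1] first):

import numpy as np

def linear_to_srgb(img):
    # sRGB transfer function for a linear image; values are clipped to [0, 1]
    img = np.clip(img, 0.0, 1.0)
    return np.where(img <= 0.0031308,
                    12.92 * img,
                    1.055 * np.power(img, 1 / 2.4) - 0.055)

srgb = linear_to_srgb(np.random.rand(4, 4, 3))  # example linear RGB image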