platform_system_sepolicy/public/swcodec_service_server.te


# Allow mediaswcodec_server to register (add) and look up (find) hal_codec2_hwservice
allow mediaswcodec_server hal_codec2_hwservice:hwservice_manager { add find };
allow mediaswcodec_server hidl_base_hwservice:hwservice_manager add;
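# Registering any HIDL service also registers it under the generic IBase
# interface, which is why the add permission on hidl_base_hwservice is needed.
# For illustration only (hypothetical domain name, not part of this policy),
# a client would need its own lookup rule to discover the service, e.g.:
#   allow some_client_domain hal_codec2_hwservice:hwservice_manager find;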
# Allow mediaswcodec_server access to composer sync fences
allow mediaswcodec_server hal_graphics_composer:fd use;
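# Allow opening the ion memory allocator (/dev/ion) to allocate shared buffers.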
allow mediaswcodec_server ion_device:chr_file r_file_perms;
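# Use file descriptors received from the camera HAL (e.g. buffer FDs during
# video recording).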
allow mediaswcodec_server hal_camera:fd use;
crash_dump_fallback(mediaswcodec_server)
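# The crash_dump_fallback macro above grants, roughly, the permissions needed
# to write a tombstone through tombstoned directly when the process crashes
# and cannot exec() crash_dump.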
# Receive gralloc buffer FDs from bufferhubd. Note that mediaswcodec_server never
# directly connects to bufferhubd via PDX. Instead, a VR app acts as a bridge
# between those two: it talks to mediaswcodec_server via Binder and talks to
# bufferhubd via PDX. Thus, there is no need to use the pdx_client macro.
allow mediaswcodec_server bufferhubd:fd use;
binder_call(mediaswcodec_server, hal_omx_client)
binder_call(hal_omx_client, mediaswcodec_server)
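# Note: the binder_call(A, B) macro roughly expands to:
#   allow A B:binder { call transfer };
#   allow B A:binder transfer;
#   allow A B:fd use;
# so each line above covers one direction of Binder IPC between
# mediaswcodec_server and OMX/Codec2 clients.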
###
### neverallow rules
###
# mediaswcodec_server should never execute any executable without a
# domain transition
neverallow mediaswcodec_server { file_type fs_type }:file execute_no_trans;
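# For illustration only (hypothetical type names, not part of this policy),
# the permitted pattern is an explicit domain transition, e.g.:
#   domain_auto_trans(mediaswcodec_server, some_exec_type, some_new_domain)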
# The goal of the mediaserver/codec split is to place media processing code into
# restrictive sandboxes with limited responsibilities and thus limited
# permissions. Example: Audioserver is only responsible for controlling audio
# hardware and processing audio content. Cameraserver does the same for camera
# hardware/content. Etc.
#
# Media processing code is inherently risky and thus should have limited
# permissions and be isolated from the rest of the system and network.
# Lengthier explanation here:
# https://android-developers.googleblog.com/2016/05/hardening-media-stack.html
neverallow mediaswcodec_server domain:{ tcp_socket udp_socket rawip_socket } *;
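# The wildcard above denies every permission on TCP, UDP, and raw IP sockets
# belonging to any domain, so mediaswcodec_server has no network access at all.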